188768458SSam Leffler /* $OpenBSD: ip_esp.c,v 1.69 2001/06/26 06:18:59 angelos Exp $ */
2c398230bSWarner Losh /*-
388768458SSam Leffler * The authors of this code are John Ioannidis (ji@tla.org),
488768458SSam Leffler * Angelos D. Keromytis (kermit@csd.uch.gr) and
588768458SSam Leffler * Niels Provos (provos@physnet.uni-hamburg.de).
688768458SSam Leffler *
788768458SSam Leffler * The original version of this code was written by John Ioannidis
888768458SSam Leffler * for BSD/OS in Athens, Greece, in November 1995.
988768458SSam Leffler *
1088768458SSam Leffler * Ported to OpenBSD and NetBSD, with additional transforms, in December 1996,
1188768458SSam Leffler * by Angelos D. Keromytis.
1288768458SSam Leffler *
1388768458SSam Leffler * Additional transforms and features in 1997 and 1998 by Angelos D. Keromytis
1488768458SSam Leffler * and Niels Provos.
1588768458SSam Leffler *
1688768458SSam Leffler * Additional features in 1999 by Angelos D. Keromytis.
1788768458SSam Leffler *
1888768458SSam Leffler * Copyright (C) 1995, 1996, 1997, 1998, 1999 by John Ioannidis,
1988768458SSam Leffler * Angelos D. Keromytis and Niels Provos.
2088768458SSam Leffler * Copyright (c) 2001 Angelos D. Keromytis.
2188768458SSam Leffler *
2288768458SSam Leffler * Permission to use, copy, and modify this software with or without fee
2388768458SSam Leffler * is hereby granted, provided that this entire notice is included in
2488768458SSam Leffler * all copies of any software which is or includes a copy or
2588768458SSam Leffler * modification of this software.
2688768458SSam Leffler * You may use this code under the GNU public license if you so wish. Please
2788768458SSam Leffler * contribute changes back to the authors under this freer than GPL license
2888768458SSam Leffler * so that we may further the use of strong encryption without limitations to
2988768458SSam Leffler * all.
3088768458SSam Leffler *
3188768458SSam Leffler * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
3288768458SSam Leffler * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
3388768458SSam Leffler * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
3488768458SSam Leffler * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
3588768458SSam Leffler * PURPOSE.
3688768458SSam Leffler */
3788768458SSam Leffler #include "opt_inet.h"
3888768458SSam Leffler #include "opt_inet6.h"
3928d2a72bSJohn Baldwin #include "opt_ipsec.h"
4088768458SSam Leffler
4188768458SSam Leffler #include <sys/param.h>
4288768458SSam Leffler #include <sys/systm.h>
4335d9e00dSJohn Baldwin #include <sys/malloc.h>
4488768458SSam Leffler #include <sys/mbuf.h>
4588768458SSam Leffler #include <sys/socket.h>
4688768458SSam Leffler #include <sys/syslog.h>
4788768458SSam Leffler #include <sys/kernel.h>
48eedc7fd9SGleb Smirnoff #include <sys/lock.h>
4988768458SSam Leffler #include <sys/random.h>
50fcf59617SAndrey V. Elsukov #include <sys/mutex.h>
5188768458SSam Leffler #include <sys/sysctl.h>
53a2bc81bfSJohn-Mark Gurney #include <machine/atomic.h>
5488768458SSam Leffler
5588768458SSam Leffler #include <net/if.h>
56eddfbb76SRobert Watson #include <net/vnet.h>
5788768458SSam Leffler
5888768458SSam Leffler #include <netinet/in.h>
5988768458SSam Leffler #include <netinet/in_systm.h>
6088768458SSam Leffler #include <netinet/ip.h>
6188768458SSam Leffler #include <netinet/ip_ecn.h>
6288768458SSam Leffler #include <netinet/ip6.h>
6388768458SSam Leffler
6488768458SSam Leffler #include <netipsec/ipsec.h>
6588768458SSam Leffler #include <netipsec/ah.h>
6688768458SSam Leffler #include <netipsec/ah_var.h>
6788768458SSam Leffler #include <netipsec/esp.h>
6888768458SSam Leffler #include <netipsec/esp_var.h>
6988768458SSam Leffler #include <netipsec/xform.h>
7088768458SSam Leffler
7188768458SSam Leffler #ifdef INET6
7288768458SSam Leffler #include <netinet6/ip6_var.h>
7388768458SSam Leffler #include <netipsec/ipsec6.h>
7488768458SSam Leffler #include <netinet6/ip6_ecn.h>
7588768458SSam Leffler #endif
7688768458SSam Leffler
7788768458SSam Leffler #include <netipsec/key.h>
7888768458SSam Leffler #include <netipsec/key_debug.h>
7988768458SSam Leffler
8088768458SSam Leffler #include <opencrypto/cryptodev.h>
8188768458SSam Leffler #include <opencrypto/xform.h>
8288768458SSam Leffler
834d36d1fdSMarcin Wojtas #define SPI_SIZE 4
844d36d1fdSMarcin Wojtas
85eddfbb76SRobert Watson VNET_DEFINE(int, esp_enable) = 1;
8641106f5aSKonstantin Belousov VNET_DEFINE(int, esp_ctr_compatibility) = 1;
87db8c0879SAndrey V. Elsukov VNET_PCPUSTAT_DEFINE(struct espstat, espstat);
88db8c0879SAndrey V. Elsukov VNET_PCPUSTAT_SYSINIT(espstat);
89db8c0879SAndrey V. Elsukov
90db8c0879SAndrey V. Elsukov #ifdef VIMAGE
91db8c0879SAndrey V. Elsukov VNET_PCPUSTAT_SYSUNINIT(espstat);
92db8c0879SAndrey V. Elsukov #endif /* VIMAGE */
9388768458SSam Leffler
9488768458SSam Leffler SYSCTL_DECL(_net_inet_esp);
956df8a710SGleb Smirnoff SYSCTL_INT(_net_inet_esp, OID_AUTO, esp_enable,
966df8a710SGleb Smirnoff CTLFLAG_VNET | CTLFLAG_RW, &VNET_NAME(esp_enable), 0, "");
97b01edfb5SMarcin Wojtas SYSCTL_INT(_net_inet_esp, OID_AUTO, ctr_compatibility,
98b01edfb5SMarcin Wojtas CTLFLAG_VNET | CTLFLAG_RW, &VNET_NAME(esp_ctr_compatibility), 0,
99b01edfb5SMarcin Wojtas "Align AES-CTR encrypted transmitted frames to blocksize");
100db8c0879SAndrey V. Elsukov SYSCTL_VNET_PCPUSTAT(_net_inet_esp, IPSECCTL_STATS, stats,
101db8c0879SAndrey V. Elsukov struct espstat, espstat,
102db8c0879SAndrey V. Elsukov "ESP statistics (struct espstat, netipsec/esp_var.h)");
103eddfbb76SRobert Watson
10435d9e00dSJohn Baldwin static MALLOC_DEFINE(M_ESP, "esp", "IPsec ESP");
10535d9e00dSJohn Baldwin
10688768458SSam Leffler static int esp_input_cb(struct cryptop *op);
10788768458SSam Leffler static int esp_output_cb(struct cryptop *crp);
108bfe1aba4SMarko Zec
10988768458SSam Leffler size_t
11088768458SSam Leffler esp_hdrsiz(struct secasvar *sav)
11188768458SSam Leffler {
11288768458SSam Leffler size_t size;
11388768458SSam Leffler
11488768458SSam Leffler if (sav != NULL) {
11588768458SSam Leffler /*XXX not right for null algorithm--does it matter??*/
1169ffa9677SSam Leffler IPSEC_ASSERT(sav->tdb_encalgxform != NULL,
1179ffa9677SSam Leffler ("SA with null xform"));
11888768458SSam Leffler if (sav->flags & SADB_X_EXT_OLD)
11988768458SSam Leffler size = sizeof (struct esp);
12088768458SSam Leffler else
12188768458SSam Leffler size = sizeof (struct newesp);
12288768458SSam Leffler size += sav->tdb_encalgxform->blocksize + 9;
12388768458SSam Leffler /*XXX need alg check???*/
12488768458SSam Leffler if (sav->tdb_authalgxform != NULL && sav->replay)
12588768458SSam Leffler size += ah_hdrsiz(sav);
12688768458SSam Leffler } else {
12788768458SSam Leffler /*
12888768458SSam Leffler * base header size
12988768458SSam Leffler * + max iv length for CBC mode
13088768458SSam Leffler * + max pad length
13188768458SSam Leffler * + sizeof (pad length field)
13288768458SSam Leffler * + sizeof (next header field)
13388768458SSam Leffler * + max icv supported.
13488768458SSam Leffler */
135cdb7ebe3SPawel Jakub Dawidek size = sizeof (struct newesp) + EALG_MAX_BLOCK_LEN + 9 + 16;
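/*
 * e.g. assuming EALG_MAX_BLOCK_LEN is 16, this evaluates to
 * 8 + 16 + 9 + 16 = 49 bytes of worst-case ESP overhead.
 */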
13688768458SSam Leffler }
13788768458SSam Leffler return size;
13888768458SSam Leffler }
13988768458SSam Leffler
14088768458SSam Leffler /*
14188768458SSam Leffler * esp_init() is called when an SPI is being set up.
14288768458SSam Leffler */
14388768458SSam Leffler static int
14488768458SSam Leffler esp_init(struct secasvar *sav, struct xformsw *xsp)
14588768458SSam Leffler {
146fcf59617SAndrey V. Elsukov const struct enc_xform *txform;
147c0341432SJohn Baldwin struct crypto_session_params csp;
14888768458SSam Leffler int keylen;
14988768458SSam Leffler int error;
15088768458SSam Leffler
151fcf59617SAndrey V. Elsukov txform = enc_algorithm_lookup(sav->alg_enc);
15288768458SSam Leffler if (txform == NULL) {
1539ffa9677SSam Leffler DPRINTF(("%s: unsupported encryption algorithm %d\n",
1549ffa9677SSam Leffler __func__, sav->alg_enc));
15588768458SSam Leffler return EINVAL;
15688768458SSam Leffler }
15788768458SSam Leffler if (sav->key_enc == NULL) {
1589ffa9677SSam Leffler DPRINTF(("%s: no encoding key for %s algorithm\n",
1599ffa9677SSam Leffler __func__, txform->name));
16088768458SSam Leffler return EINVAL;
16188768458SSam Leffler }
162a2bc81bfSJohn-Mark Gurney if ((sav->flags & (SADB_X_EXT_OLD | SADB_X_EXT_IV4B)) ==
163a2bc81bfSJohn-Mark Gurney SADB_X_EXT_IV4B) {
1649ffa9677SSam Leffler DPRINTF(("%s: 4-byte IV not supported with protocol\n",
1659ffa9677SSam Leffler __func__));
16688768458SSam Leffler return EINVAL;
16788768458SSam Leffler }
168c2fd516fSJohn Baldwin
169a2bc81bfSJohn-Mark Gurney /* subtract off the salt, RFC4106, 8.1 and RFC3686, 5.1 */
1709f8f3a8eSKristof Provost keylen = _KEYLEN(sav->key_enc) - SAV_ISCTRORGCM(sav) * 4 -
1719f8f3a8eSKristof Provost SAV_ISCHACHA(sav) * 4;
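/*
 * e.g. a 20-byte PF_KEY key for AES-GCM holds a 16-byte AES-128 key
 * followed by the 4-byte salt, so keylen ends up as 16 here.
 */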
17288768458SSam Leffler if (txform->minkey > keylen || keylen > txform->maxkey) {
1739ffa9677SSam Leffler DPRINTF(("%s: invalid key length %u, must be in the range "
1749ffa9677SSam Leffler "[%u..%u] for algorithm %s\n", __func__,
17588768458SSam Leffler keylen, txform->minkey, txform->maxkey,
17688768458SSam Leffler txform->name));
17788768458SSam Leffler return EINVAL;
17888768458SSam Leffler }
17988768458SSam Leffler
1809f8f3a8eSKristof Provost if (SAV_ISCTRORGCM(sav) || SAV_ISCHACHA(sav))
181a2bc81bfSJohn-Mark Gurney sav->ivlen = 8; /* RFC4106 3.1 and RFC3686 3.1 */
182a2bc81bfSJohn-Mark Gurney else
1830c80e7dfSAndrey V. Elsukov sav->ivlen = txform->ivsize;
18488768458SSam Leffler
185c0341432SJohn Baldwin memset(&csp, 0, sizeof(csp));
186c0341432SJohn Baldwin
18788768458SSam Leffler /*
18888768458SSam Leffler * Setup AH-related state.
18988768458SSam Leffler */
19088768458SSam Leffler if (sav->alg_auth != 0) {
191c0341432SJohn Baldwin error = ah_init0(sav, xsp, &csp);
19288768458SSam Leffler if (error)
19388768458SSam Leffler return error;
19488768458SSam Leffler }
19588768458SSam Leffler
19688768458SSam Leffler /* NB: override anything set in ah_init0 */
19788768458SSam Leffler sav->tdb_xform = xsp;
19888768458SSam Leffler sav->tdb_encalgxform = txform;
19988768458SSam Leffler
20016de9ac1SGeorge V. Neville-Neil /*
20116de9ac1SGeorge V. Neville-Neil * Whenever AES-GCM is used for encryption, one
20216de9ac1SGeorge V. Neville-Neil * of the AES authentication algorithms is chosen
20316de9ac1SGeorge V. Neville-Neil * as well, based on the key size.
20416de9ac1SGeorge V. Neville-Neil */
20516de9ac1SGeorge V. Neville-Neil if (sav->alg_enc == SADB_X_EALG_AESGCM16) {
20616de9ac1SGeorge V. Neville-Neil switch (keylen) {
207a2bc81bfSJohn-Mark Gurney case AES_128_GMAC_KEY_LEN:
20816de9ac1SGeorge V. Neville-Neil sav->alg_auth = SADB_X_AALG_AES128GMAC;
20916de9ac1SGeorge V. Neville-Neil sav->tdb_authalgxform = &auth_hash_nist_gmac_aes_128;
21016de9ac1SGeorge V. Neville-Neil break;
211a2bc81bfSJohn-Mark Gurney case AES_192_GMAC_KEY_LEN:
21216de9ac1SGeorge V. Neville-Neil sav->alg_auth = SADB_X_AALG_AES192GMAC;
21316de9ac1SGeorge V. Neville-Neil sav->tdb_authalgxform = &auth_hash_nist_gmac_aes_192;
21416de9ac1SGeorge V. Neville-Neil break;
215a2bc81bfSJohn-Mark Gurney case AES_256_GMAC_KEY_LEN:
21616de9ac1SGeorge V. Neville-Neil sav->alg_auth = SADB_X_AALG_AES256GMAC;
21716de9ac1SGeorge V. Neville-Neil sav->tdb_authalgxform = &auth_hash_nist_gmac_aes_256;
21816de9ac1SGeorge V. Neville-Neil break;
21916de9ac1SGeorge V. Neville-Neil default:
22016de9ac1SGeorge V. Neville-Neil DPRINTF(("%s: invalid key length %u "
22116de9ac1SGeorge V. Neville-Neil "for algorithm %s\n", __func__,
22216de9ac1SGeorge V. Neville-Neil keylen, txform->name));
22316de9ac1SGeorge V. Neville-Neil return EINVAL;
22416de9ac1SGeorge V. Neville-Neil }
225c0341432SJohn Baldwin csp.csp_mode = CSP_MODE_AEAD;
2264d36d1fdSMarcin Wojtas if (sav->flags & SADB_X_SAFLAGS_ESN)
2274d36d1fdSMarcin Wojtas csp.csp_flags |= CSP_F_SEPARATE_AAD;
2289f8f3a8eSKristof Provost } else if (sav->alg_enc == SADB_X_EALG_CHACHA20POLY1305) {
2299f8f3a8eSKristof Provost sav->alg_auth = SADB_X_AALG_CHACHA20POLY1305;
2309f8f3a8eSKristof Provost sav->tdb_authalgxform = &auth_hash_poly1305;
2319f8f3a8eSKristof Provost csp.csp_mode = CSP_MODE_AEAD;
2329f8f3a8eSKristof Provost if (sav->flags & SADB_X_SAFLAGS_ESN)
2339f8f3a8eSKristof Provost csp.csp_flags |= CSP_F_SEPARATE_AAD;
2344d36d1fdSMarcin Wojtas } else if (sav->alg_auth != 0) {
235c0341432SJohn Baldwin csp.csp_mode = CSP_MODE_ETA;
2364d36d1fdSMarcin Wojtas if (sav->flags & SADB_X_SAFLAGS_ESN)
2374d36d1fdSMarcin Wojtas csp.csp_flags |= CSP_F_ESN;
2384d36d1fdSMarcin Wojtas } else
239c0341432SJohn Baldwin csp.csp_mode = CSP_MODE_CIPHER;
24016de9ac1SGeorge V. Neville-Neil
24188768458SSam Leffler /* Initialize crypto session. */
242c0341432SJohn Baldwin csp.csp_cipher_alg = sav->tdb_encalgxform->type;
243897e4312SJohn Baldwin if (csp.csp_cipher_alg != CRYPTO_NULL_CBC) {
244c0341432SJohn Baldwin csp.csp_cipher_key = sav->key_enc->key_data;
245c0341432SJohn Baldwin csp.csp_cipher_klen = _KEYBITS(sav->key_enc) / 8 -
2469f8f3a8eSKristof Provost SAV_ISCTRORGCM(sav) * 4 - SAV_ISCHACHA(sav) * 4;
247897e4312SJohn Baldwin }
248c0341432SJohn Baldwin csp.csp_ivlen = txform->ivsize;
24988768458SSam Leffler
250c0341432SJohn Baldwin error = crypto_newsession(&sav->tdb_cryptoid, &csp, V_crypto_support);
25188768458SSam Leffler return error;
25288768458SSam Leffler }
25388768458SSam Leffler
254dae61c9dSJohn Baldwin static void
255dae61c9dSJohn Baldwin esp_cleanup(struct secasvar *sav)
25688768458SSam Leffler {
25788768458SSam Leffler
258dae61c9dSJohn Baldwin crypto_freesession(sav->tdb_cryptoid);
259dae61c9dSJohn Baldwin sav->tdb_cryptoid = NULL;
260dae61c9dSJohn Baldwin sav->tdb_authalgxform = NULL;
26188768458SSam Leffler sav->tdb_encalgxform = NULL;
26288768458SSam Leffler }
26388768458SSam Leffler
26488768458SSam Leffler /*
26588768458SSam Leffler * ESP input processing, called (eventually) through the protocol switch.
26688768458SSam Leffler */
26788768458SSam Leffler static int
26888768458SSam Leffler esp_input(struct mbuf *m, struct secasvar *sav, int skip, int protoff)
26988768458SSam Leffler {
2707f1f6591SAndrey V. Elsukov IPSEC_DEBUG_DECLARE(char buf[128]);
271fcf59617SAndrey V. Elsukov const struct auth_hash *esph;
272fcf59617SAndrey V. Elsukov const struct enc_xform *espx;
273fcf59617SAndrey V. Elsukov struct xform_data *xd;
27488768458SSam Leffler struct cryptop *crp;
275fcf59617SAndrey V. Elsukov struct newesp *esp;
276fcf59617SAndrey V. Elsukov uint8_t *ivp;
2772e08e39fSConrad Meyer crypto_session_t cryptoid;
2785f7c516fSAndrey V. Elsukov int alen, error, hlen, plen;
2798b7f3994SMarcin Wojtas uint32_t seqh;
2804d36d1fdSMarcin Wojtas const struct crypto_session_params *csp;
28188768458SSam Leffler
2820361f165SKristof Provost SECASVAR_RLOCK_TRACKER;
2830361f165SKristof Provost
2849ffa9677SSam Leffler IPSEC_ASSERT(sav != NULL, ("null SA"));
2859ffa9677SSam Leffler IPSEC_ASSERT(sav->tdb_encalgxform != NULL, ("null encoding xform"));
286a45bff04SVANHULLEBUS Yvan
2875f7c516fSAndrey V. Elsukov error = EINVAL;
288a45bff04SVANHULLEBUS Yvan /* Valid IP packet length?  ESP requires 4-byte alignment. */
289a45bff04SVANHULLEBUS Yvan if ((skip & 3) || (m->m_pkthdr.len & 3)) {
290a45bff04SVANHULLEBUS Yvan DPRINTF(("%s: misaligned packet, skip %u pkt len %u\n",
291a45bff04SVANHULLEBUS Yvan __func__, skip, m->m_pkthdr.len));
292a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_badilen);
2935f7c516fSAndrey V. Elsukov goto bad;
294a45bff04SVANHULLEBUS Yvan }
29563abacc2SBjoern A. Zeeb
296a4adf6ccSBjoern A. Zeeb if (m->m_len < skip + sizeof(*esp)) {
29763abacc2SBjoern A. Zeeb m = m_pullup(m, skip + sizeof(*esp));
29863abacc2SBjoern A. Zeeb if (m == NULL) {
29963abacc2SBjoern A. Zeeb DPRINTF(("%s: cannot pullup header\n", __func__));
30063abacc2SBjoern A. Zeeb ESPSTAT_INC(esps_hdrops); /*XXX*/
30163abacc2SBjoern A. Zeeb error = ENOBUFS;
30263abacc2SBjoern A. Zeeb goto bad;
30363abacc2SBjoern A. Zeeb }
304a4adf6ccSBjoern A. Zeeb }
30563abacc2SBjoern A. Zeeb esp = (struct newesp *)(mtod(m, caddr_t) + skip);
30688768458SSam Leffler
30788768458SSam Leffler esph = sav->tdb_authalgxform;
30888768458SSam Leffler espx = sav->tdb_encalgxform;
30988768458SSam Leffler
310a09a7146SJohn-Mark Gurney /* Determine the ESP header and auth length */
31188768458SSam Leffler if (sav->flags & SADB_X_EXT_OLD)
31288768458SSam Leffler hlen = sizeof (struct esp) + sav->ivlen;
31388768458SSam Leffler else
31488768458SSam Leffler hlen = sizeof (struct newesp) + sav->ivlen;
315a09a7146SJohn-Mark Gurney
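/*
 * alen is the length of the ICV appended to the packet;
 * xform_ah_authsize() is expected to return 0 when esph is NULL
 * (no authenticator), so the length checks below still hold.
 */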
316a09a7146SJohn-Mark Gurney alen = xform_ah_authsize(esph);
31788768458SSam Leffler
31888768458SSam Leffler /*
31988768458SSam Leffler * Verify payload length is multiple of encryption algorithm
32088768458SSam Leffler * block size.
32188768458SSam Leffler *
32288768458SSam Leffler * NB: This works for the null algorithm because the blocksize
32388768458SSam Leffler * is 4 and all packets must be 4-byte aligned regardless
32488768458SSam Leffler * of the algorithm.
32588768458SSam Leffler */
32688768458SSam Leffler plen = m->m_pkthdr.len - (skip + hlen + alen);
32788768458SSam Leffler if ((plen & (espx->blocksize - 1)) || (plen <= 0)) {
3289ffa9677SSam Leffler DPRINTF(("%s: payload of %d octets not a multiple of %d octets,"
329a2bc81bfSJohn-Mark Gurney " SA %s/%08lx\n", __func__, plen, espx->blocksize,
330a2bc81bfSJohn-Mark Gurney ipsec_address(&sav->sah->saidx.dst, buf, sizeof(buf)),
331a2bc81bfSJohn-Mark Gurney (u_long)ntohl(sav->spi)));
332a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_badilen);
3335f7c516fSAndrey V. Elsukov goto bad;
33488768458SSam Leffler }
33588768458SSam Leffler
33688768458SSam Leffler /*
33788768458SSam Leffler * Check sequence number.
33888768458SSam Leffler */
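/*
 * NB: besides validating the sequence number against the replay
 * window, ipsec_chkreplay() reports the estimated high-order ESN
 * bits in seqh; they are folded into the AAD/ICV computation below
 * when extended sequence numbers are in use.
 */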
3390361f165SKristof Provost SECASVAR_RLOCK(sav);
340fcf59617SAndrey V. Elsukov if (esph != NULL && sav->replay != NULL && sav->replay->wsize != 0) {
3418b7f3994SMarcin Wojtas if (ipsec_chkreplay(ntohl(esp->esp_seq), &seqh, sav) == 0) {
3420361f165SKristof Provost SECASVAR_RUNLOCK(sav);
3439ffa9677SSam Leffler DPRINTF(("%s: packet replay check for %s\n", __func__,
344fcf59617SAndrey V. Elsukov ipsec_sa2str(sav, buf, sizeof(buf))));
345a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_replay);
3465f7c516fSAndrey V. Elsukov error = EACCES;
3475f7c516fSAndrey V. Elsukov goto bad;
34888768458SSam Leffler }
3494d36d1fdSMarcin Wojtas seqh = htonl(seqh);
350fcf59617SAndrey V. Elsukov }
351fcf59617SAndrey V. Elsukov cryptoid = sav->tdb_cryptoid;
3520361f165SKristof Provost SECASVAR_RUNLOCK(sav);
35388768458SSam Leffler
35488768458SSam Leffler /* Update the counters */
355a04d64d8SAndrey V. Elsukov ESPSTAT_ADD(esps_ibytes, m->m_pkthdr.len - (skip + hlen + alen));
35688768458SSam Leffler
35788768458SSam Leffler /* Get crypto descriptors */
358c0341432SJohn Baldwin crp = crypto_getreq(cryptoid, M_NOWAIT);
35988768458SSam Leffler if (crp == NULL) {
3609ffa9677SSam Leffler DPRINTF(("%s: failed to acquire crypto descriptors\n",
3619ffa9677SSam Leffler __func__));
362a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_crypto);
3635f7c516fSAndrey V. Elsukov error = ENOBUFS;
3645f7c516fSAndrey V. Elsukov goto bad;
36588768458SSam Leffler }
36688768458SSam Leffler
36788768458SSam Leffler /* Get IPsec-specific opaque pointer */
36835d9e00dSJohn Baldwin xd = malloc(sizeof(*xd), M_ESP, M_NOWAIT | M_ZERO);
369fcf59617SAndrey V. Elsukov if (xd == NULL) {
370fcf59617SAndrey V. Elsukov DPRINTF(("%s: failed to allocate xform_data\n", __func__));
3714d36d1fdSMarcin Wojtas goto xd_fail;
37288768458SSam Leffler }
37388768458SSam Leffler
37408537f45SAndrey V. Elsukov if (esph != NULL) {
375c0341432SJohn Baldwin crp->crp_op = CRYPTO_OP_VERIFY_DIGEST;
3769f8f3a8eSKristof Provost if (SAV_ISGCM(sav) || SAV_ISCHACHA(sav))
377c0341432SJohn Baldwin crp->crp_aad_length = 8; /* RFC4106 5, SPI + SN */
37816de9ac1SGeorge V. Neville-Neil else
379c0341432SJohn Baldwin crp->crp_aad_length = hlen;
3804d36d1fdSMarcin Wojtas
3814d36d1fdSMarcin Wojtas csp = crypto_get_params(crp->crp_session);
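/*
 * For AEAD ciphers with ESN the AAD cannot be taken directly from
 * the packet, since the high-order sequence bits are never
 * transmitted.  Build a separate AAD buffer laid out as
 * SPI || ESN high-order 32 bits || SN (cf. RFC4106 5).
 */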
3824d36d1fdSMarcin Wojtas if ((csp->csp_flags & CSP_F_SEPARATE_AAD) &&
3834d36d1fdSMarcin Wojtas (sav->replay != NULL) && (sav->replay->wsize != 0)) {
3844d36d1fdSMarcin Wojtas int aad_skip;
3854d36d1fdSMarcin Wojtas
3864d36d1fdSMarcin Wojtas crp->crp_aad_length += sizeof(seqh);
38735d9e00dSJohn Baldwin crp->crp_aad = malloc(crp->crp_aad_length, M_ESP, M_NOWAIT);
3884d36d1fdSMarcin Wojtas if (crp->crp_aad == NULL) {
3894d36d1fdSMarcin Wojtas DPRINTF(("%s: failed to allocate AAD buffer\n",
3904d36d1fdSMarcin Wojtas __func__));
3914d36d1fdSMarcin Wojtas goto crp_aad_fail;
3924d36d1fdSMarcin Wojtas }
3934d36d1fdSMarcin Wojtas
3944d36d1fdSMarcin Wojtas /* SPI */
3954d36d1fdSMarcin Wojtas m_copydata(m, skip, SPI_SIZE, crp->crp_aad);
3964d36d1fdSMarcin Wojtas aad_skip = SPI_SIZE;
3974d36d1fdSMarcin Wojtas
3984d36d1fdSMarcin Wojtas /* ESN */
3994d36d1fdSMarcin Wojtas bcopy(&seqh, (char *)crp->crp_aad + aad_skip, sizeof(seqh));
4004d36d1fdSMarcin Wojtas aad_skip += sizeof(seqh);
4014d36d1fdSMarcin Wojtas
4024d36d1fdSMarcin Wojtas /* Rest of aad */
4034d36d1fdSMarcin Wojtas if (crp->crp_aad_length - aad_skip > 0)
4044d36d1fdSMarcin Wojtas m_copydata(m, skip + SPI_SIZE,
4054d36d1fdSMarcin Wojtas crp->crp_aad_length - aad_skip,
4064d36d1fdSMarcin Wojtas (char *)crp->crp_aad + aad_skip);
4074d36d1fdSMarcin Wojtas } else
4084d36d1fdSMarcin Wojtas crp->crp_aad_start = skip;
4094d36d1fdSMarcin Wojtas
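/*
 * Non-AEAD (ETA) sessions created with CSP_F_ESN instead pass the
 * high-order sequence bits via crp_esn, so the driver can include
 * them in the ICV computation.
 */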
4104d36d1fdSMarcin Wojtas if (csp->csp_flags & CSP_F_ESN &&
4114d36d1fdSMarcin Wojtas sav->replay != NULL && sav->replay->wsize != 0)
4124d36d1fdSMarcin Wojtas memcpy(crp->crp_esn, &seqh, sizeof(seqh));
4134d36d1fdSMarcin Wojtas
414c0341432SJohn Baldwin crp->crp_digest_start = m->m_pkthdr.len - alen;
41588768458SSam Leffler }
41688768458SSam Leffler
41788768458SSam Leffler /* Crypto operation descriptor */
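/*
 * CRYPTO_F_CBIFSYNC lets the completion callback run directly in
 * this context when the selected crypto driver is synchronous
 * (e.g. software crypto), avoiding a detour through the crypto
 * return queue.
 */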
418c0341432SJohn Baldwin crp->crp_flags = CRYPTO_F_CBIFSYNC;
4199c0e3d3aSJohn Baldwin crypto_use_mbuf(crp, m);
42088768458SSam Leffler crp->crp_callback = esp_input_cb;
421c0341432SJohn Baldwin crp->crp_opaque = xd;
42288768458SSam Leffler
42388768458SSam Leffler /* These are passed as-is to the callback */
424fcf59617SAndrey V. Elsukov xd->sav = sav;
425fcf59617SAndrey V. Elsukov xd->protoff = protoff;
426fcf59617SAndrey V. Elsukov xd->skip = skip;
427fcf59617SAndrey V. Elsukov xd->cryptoid = cryptoid;
428fd40ecf3SJohn Baldwin xd->vnet = curvnet;
42988768458SSam Leffler
43088768458SSam Leffler /* Decryption descriptor */
431c0341432SJohn Baldwin crp->crp_op |= CRYPTO_OP_DECRYPT;
432c0341432SJohn Baldwin crp->crp_payload_start = skip + hlen;
433c0341432SJohn Baldwin crp->crp_payload_length = m->m_pkthdr.len - (skip + hlen + alen);
43488768458SSam Leffler
435c161c46dSJohn Baldwin /* Generate or read cipher IV. */
4369f8f3a8eSKristof Provost if (SAV_ISCTRORGCM(sav) || SAV_ISCHACHA(sav)) {
437c0341432SJohn Baldwin ivp = &crp->crp_iv[0];
43816de9ac1SGeorge V. Neville-Neil
439c161c46dSJohn Baldwin /*
440c161c46dSJohn Baldwin * AES-GCM and AES-CTR use similar cipher IV formats
441c161c46dSJohn Baldwin * defined in RFC 4106 section 4 and RFC 3686 section
442c161c46dSJohn Baldwin * 4, respectively.
443c161c46dSJohn Baldwin *
444c161c46dSJohn Baldwin * The first 4 bytes of the cipher IV contain an
445c161c46dSJohn Baldwin * implicit salt, or nonce, obtained from the last 4
446c161c46dSJohn Baldwin * bytes of the encryption key. The next 8 bytes hold
447c161c46dSJohn Baldwin * an explicit IV unique to each packet. This
448c161c46dSJohn Baldwin * explicit IV is used as the ESP IV for the packet.
449c161c46dSJohn Baldwin * The last 4 bytes hold a big-endian block counter
450c161c46dSJohn Baldwin * incremented for each block. For AES-GCM, the block
451c161c46dSJohn Baldwin * counter's initial value is defined as part of the
452c161c46dSJohn Baldwin * algorithm. For AES-CTR, the block counter's
453c161c46dSJohn Baldwin * initial value for each packet is defined as 1 by
454c161c46dSJohn Baldwin * RFC 3686.
455c161c46dSJohn Baldwin *
456c161c46dSJohn Baldwin * ------------------------------------------
457c161c46dSJohn Baldwin * | Salt | Explicit ESP IV | Block Counter |
458c161c46dSJohn Baldwin * ------------------------------------------
459c161c46dSJohn Baldwin * 4 bytes 8 bytes 4 bytes
460c161c46dSJohn Baldwin */
461a2bc81bfSJohn-Mark Gurney memcpy(ivp, sav->key_enc->key_data +
462a2bc81bfSJohn-Mark Gurney _KEYLEN(sav->key_enc) - 4, 4);
463c161c46dSJohn Baldwin m_copydata(m, skip + hlen - sav->ivlen, sav->ivlen, &ivp[4]);
464a2bc81bfSJohn-Mark Gurney if (SAV_ISCTR(sav)) {
465a2bc81bfSJohn-Mark Gurney be32enc(&ivp[sav->ivlen + 4], 1);
466a2bc81bfSJohn-Mark Gurney }
467c0341432SJohn Baldwin crp->crp_flags |= CRYPTO_F_IV_SEPARATE;
468c0341432SJohn Baldwin } else if (sav->ivlen != 0)
469c0341432SJohn Baldwin crp->crp_iv_start = skip + hlen - sav->ivlen;
47088768458SSam Leffler
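/*
 * When the async_crypto sysctl is enabled, hand the request to the
 * asynchronous crypto dispatch queue; CRYPTO_ASYNC_ORDERED is meant
 * to keep completions in submission order so decrypted packets are
 * not reordered.
 */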
47168f6800cSMark Johnston if (V_async_crypto)
47268f6800cSMark Johnston return (crypto_dispatch_async(crp, CRYPTO_ASYNC_ORDERED));
47368f6800cSMark Johnston else
47408537f45SAndrey V. Elsukov return (crypto_dispatch(crp));
4754d36d1fdSMarcin Wojtas
4764d36d1fdSMarcin Wojtas crp_aad_fail:
47735d9e00dSJohn Baldwin free(xd, M_ESP);
4784d36d1fdSMarcin Wojtas xd_fail:
4794d36d1fdSMarcin Wojtas crypto_freereq(crp);
4804d36d1fdSMarcin Wojtas ESPSTAT_INC(esps_crypto);
4814d36d1fdSMarcin Wojtas error = ENOBUFS;
4825f7c516fSAndrey V. Elsukov bad:
4835f7c516fSAndrey V. Elsukov m_freem(m);
4845f7c516fSAndrey V. Elsukov key_freesav(&sav);
4855f7c516fSAndrey V. Elsukov return (error);
48688768458SSam Leffler }
48788768458SSam Leffler
48888768458SSam Leffler /*
48988768458SSam Leffler * ESP input callback from the crypto driver.
49088768458SSam Leffler */
49188768458SSam Leffler static int
49288768458SSam Leffler esp_input_cb(struct cryptop *crp)
49388768458SSam Leffler {
4947f1f6591SAndrey V. Elsukov IPSEC_DEBUG_DECLARE(char buf[128]);
495c0341432SJohn Baldwin uint8_t lastthree[3];
496fcf59617SAndrey V. Elsukov const struct auth_hash *esph;
49788768458SSam Leffler struct mbuf *m;
498fcf59617SAndrey V. Elsukov struct xform_data *xd;
49988768458SSam Leffler struct secasvar *sav;
50088768458SSam Leffler struct secasindex *saidx;
5012e08e39fSConrad Meyer crypto_session_t cryptoid;
502fcf59617SAndrey V. Elsukov int hlen, skip, protoff, error, alen;
50388768458SSam Leffler
5040361f165SKristof Provost SECASVAR_RLOCK_TRACKER;
5050361f165SKristof Provost
5069c0e3d3aSJohn Baldwin m = crp->crp_buf.cb_mbuf;
507c0341432SJohn Baldwin xd = crp->crp_opaque;
508fd40ecf3SJohn Baldwin CURVNET_SET(xd->vnet);
509fcf59617SAndrey V. Elsukov sav = xd->sav;
5101a56620bSKonstantin Belousov if (sav->state >= SADB_SASTATE_DEAD) {
5111a56620bSKonstantin Belousov /* saidx is freed */
5121a56620bSKonstantin Belousov DPRINTF(("%s: dead SA %p spi %#x\n", __func__, sav, sav->spi));
5131a56620bSKonstantin Belousov ESPSTAT_INC(esps_notdb);
5141a56620bSKonstantin Belousov error = ESRCH;
5151a56620bSKonstantin Belousov goto bad;
5161a56620bSKonstantin Belousov }
517fcf59617SAndrey V. Elsukov skip = xd->skip;
518fcf59617SAndrey V. Elsukov protoff = xd->protoff;
519fcf59617SAndrey V. Elsukov cryptoid = xd->cryptoid;
52088768458SSam Leffler saidx = &sav->sah->saidx;
52188768458SSam Leffler esph = sav->tdb_authalgxform;
52288768458SSam Leffler
52388768458SSam Leffler /* Check for crypto errors */
52488768458SSam Leffler if (crp->crp_etype) {
525fcf59617SAndrey V. Elsukov if (crp->crp_etype == EAGAIN) {
52688768458SSam Leffler /* Reset the session ID */
5271b0909d5SConrad Meyer if (ipsec_updateid(sav, &crp->crp_session, &cryptoid) != 0)
528fcf59617SAndrey V. Elsukov crypto_freesession(cryptoid);
5291b0909d5SConrad Meyer xd->cryptoid = crp->crp_session;
530fd40ecf3SJohn Baldwin CURVNET_RESTORE();
5310a95a08eSPawel Jakub Dawidek return (crypto_dispatch(crp));
532fcf59617SAndrey V. Elsukov }
533c0341432SJohn Baldwin
534c0341432SJohn Baldwin /* EBADMSG indicates authentication failure. */
535c0341432SJohn Baldwin if (!(crp->crp_etype == EBADMSG && esph != NULL)) {
536a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_noxform);
537c0341432SJohn Baldwin DPRINTF(("%s: crypto error %d\n", __func__,
538c0341432SJohn Baldwin crp->crp_etype));
53988768458SSam Leffler error = crp->crp_etype;
54088768458SSam Leffler goto bad;
54188768458SSam Leffler }
542c0341432SJohn Baldwin }
54388768458SSam Leffler
54488768458SSam Leffler /* Shouldn't happen... */
54588768458SSam Leffler if (m == NULL) {
546a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_crypto);
5479ffa9677SSam Leffler DPRINTF(("%s: bogus returned buffer from crypto\n", __func__));
54888768458SSam Leffler error = EINVAL;
54988768458SSam Leffler goto bad;
55088768458SSam Leffler }
551*b1c3a4d7SKristof Provost ESPSTAT_INC2(esps_hist, sav->alg_enc);
55288768458SSam Leffler
55388768458SSam Leffler /* If authentication was performed, check now. */
55488768458SSam Leffler if (esph != NULL) {
555a09a7146SJohn-Mark Gurney alen = xform_ah_authsize(esph);
556*b1c3a4d7SKristof Provost AHSTAT_INC2(ahs_hist, sav->alg_auth);
557c0341432SJohn Baldwin if (crp->crp_etype == EBADMSG) {
55808537f45SAndrey V. Elsukov DPRINTF(("%s: authentication hash mismatch for "
55908537f45SAndrey V. Elsukov "packet in SA %s/%08lx\n", __func__,
560962ac6c7SAndrey V. Elsukov ipsec_address(&saidx->dst, buf, sizeof(buf)),
56188768458SSam Leffler (u_long) ntohl(sav->spi)));
562a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_badauth);
56388768458SSam Leffler error = EACCES;
56488768458SSam Leffler goto bad;
56588768458SSam Leffler }
566fcf59617SAndrey V. Elsukov m->m_flags |= M_AUTHIPDGM;
56788768458SSam Leffler /* Remove trailing authenticator */
568442da28aSVANHULLEBUS Yvan m_adj(m, -alen);
56988768458SSam Leffler }
57088768458SSam Leffler
57188768458SSam Leffler /* Release the crypto descriptors */
57235d9e00dSJohn Baldwin free(xd, M_ESP), xd = NULL;
57335d9e00dSJohn Baldwin free(crp->crp_aad, M_ESP), crp->crp_aad = NULL;
57488768458SSam Leffler crypto_freereq(crp), crp = NULL;
57588768458SSam Leffler
57688768458SSam Leffler /*
57788768458SSam Leffler * Packet is now decrypted.
57888768458SSam Leffler */
57988768458SSam Leffler m->m_flags |= M_DECRYPTED;
58088768458SSam Leffler
581d16f6f50SColin Percival /*
582d16f6f50SColin Percival * Update replay sequence number, if appropriate.
583d16f6f50SColin Percival */
584d16f6f50SColin Percival if (sav->replay) {
585d16f6f50SColin Percival u_int32_t seq;
586d16f6f50SColin Percival
587d16f6f50SColin Percival m_copydata(m, skip + offsetof(struct newesp, esp_seq),
588d16f6f50SColin Percival sizeof (seq), (caddr_t) &seq);
5890361f165SKristof Provost SECASVAR_RLOCK(sav);
590d16f6f50SColin Percival if (ipsec_updatereplay(ntohl(seq), sav)) {
5910361f165SKristof Provost SECASVAR_RUNLOCK(sav);
592d16f6f50SColin Percival DPRINTF(("%s: packet replay check for %s\n", __func__,
593fcf59617SAndrey V. Elsukov ipsec_sa2str(sav, buf, sizeof(buf))));
594a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_replay);
595fcf59617SAndrey V. Elsukov error = EACCES;
596d16f6f50SColin Percival goto bad;
597d16f6f50SColin Percival }
5980361f165SKristof Provost SECASVAR_RUNLOCK(sav);
599d16f6f50SColin Percival }
600d16f6f50SColin Percival
60188768458SSam Leffler /* Determine the ESP header length */
60288768458SSam Leffler if (sav->flags & SADB_X_EXT_OLD)
60388768458SSam Leffler hlen = sizeof (struct esp) + sav->ivlen;
60488768458SSam Leffler else
60588768458SSam Leffler hlen = sizeof (struct newesp) + sav->ivlen;
60688768458SSam Leffler
60788768458SSam Leffler /* Remove the ESP header and IV from the mbuf. */
60888768458SSam Leffler error = m_striphdr(m, skip, hlen);
60988768458SSam Leffler if (error) {
610a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_hdrops);
6119ffa9677SSam Leffler DPRINTF(("%s: bad mbuf chain, SA %s/%08lx\n", __func__,
612962ac6c7SAndrey V. Elsukov ipsec_address(&sav->sah->saidx.dst, buf, sizeof(buf)),
61388768458SSam Leffler (u_long) ntohl(sav->spi)));
61488768458SSam Leffler goto bad;
61588768458SSam Leffler }
61688768458SSam Leffler
61788768458SSam Leffler /* Save the last three bytes of decrypted data */
61888768458SSam Leffler m_copydata(m, m->m_pkthdr.len - 3, 3, lastthree);
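/*
 * ESP trailer layout (RFC 4303): lastthree[0] is the final padding
 * byte, lastthree[1] the Pad Length and lastthree[2] the Next
 * Header field.
 */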
61988768458SSam Leffler
62088768458SSam Leffler /* Verify pad length */
62188768458SSam Leffler if (lastthree[1] + 2 > m->m_pkthdr.len - skip) {
622a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_badilen);
6239ffa9677SSam Leffler DPRINTF(("%s: invalid padding length %d for %u byte packet "
624962ac6c7SAndrey V. Elsukov "in SA %s/%08lx\n", __func__, lastthree[1],
625962ac6c7SAndrey V. Elsukov m->m_pkthdr.len - skip,
626962ac6c7SAndrey V. Elsukov ipsec_address(&sav->sah->saidx.dst, buf, sizeof(buf)),
62788768458SSam Leffler (u_long) ntohl(sav->spi)));
62888768458SSam Leffler error = EINVAL;
62988768458SSam Leffler goto bad;
63088768458SSam Leffler }
63188768458SSam Leffler
63288768458SSam Leffler /* Verify correct decryption by checking the last padding bytes */
63388768458SSam Leffler if ((sav->flags & SADB_X_EXT_PMASK) != SADB_X_EXT_PRAND) {
63488768458SSam Leffler if (lastthree[1] != lastthree[0] && lastthree[1] != 0) {
635a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_badenc);
6369ffa9677SSam Leffler DPRINTF(("%s: decryption failed for packet in "
637962ac6c7SAndrey V. Elsukov "SA %s/%08lx\n", __func__, ipsec_address(
638962ac6c7SAndrey V. Elsukov &sav->sah->saidx.dst, buf, sizeof(buf)),
63988768458SSam Leffler (u_long) ntohl(sav->spi)));
64088768458SSam Leffler error = EINVAL;
64188768458SSam Leffler goto bad;
64288768458SSam Leffler }
64388768458SSam Leffler }
64488768458SSam Leffler
6453f44ee8eSAndrey V. Elsukov /*
6463f44ee8eSAndrey V. Elsukov * RFC4303 2.6:
6473f44ee8eSAndrey V. Elsukov * Silently drop packet if next header field is IPPROTO_NONE.
6483f44ee8eSAndrey V. Elsukov */
6493f44ee8eSAndrey V. Elsukov if (lastthree[2] == IPPROTO_NONE)
6503f44ee8eSAndrey V. Elsukov goto bad;
6513f44ee8eSAndrey V. Elsukov
65288768458SSam Leffler /* Trim the mbuf chain to remove trailing authenticator and padding */
65388768458SSam Leffler m_adj(m, -(lastthree[1] + 2));
65488768458SSam Leffler
65588768458SSam Leffler /* Restore the Next Protocol field */
65688768458SSam Leffler m_copyback(m, protoff, sizeof (u_int8_t), lastthree + 2);
65788768458SSam Leffler
658db178eb8SBjoern A. Zeeb switch (saidx->dst.sa.sa_family) {
659db178eb8SBjoern A. Zeeb #ifdef INET6
660db178eb8SBjoern A. Zeeb case AF_INET6:
661f0514a8bSAndrey V. Elsukov error = ipsec6_common_input_cb(m, sav, skip, protoff);
662db178eb8SBjoern A. Zeeb break;
663db178eb8SBjoern A. Zeeb #endif
664db178eb8SBjoern A. Zeeb #ifdef INET
665db178eb8SBjoern A. Zeeb case AF_INET:
666f0514a8bSAndrey V. Elsukov error = ipsec4_common_input_cb(m, sav, skip, protoff);
667db178eb8SBjoern A. Zeeb break;
668db178eb8SBjoern A. Zeeb #endif
669db178eb8SBjoern A. Zeeb default:
670db178eb8SBjoern A. Zeeb panic("%s: Unexpected address family: %d saidx=%p", __func__,
671db178eb8SBjoern A. Zeeb saidx->dst.sa.sa_family, saidx);
672db178eb8SBjoern A. Zeeb }
673fd40ecf3SJohn Baldwin CURVNET_RESTORE();
67488768458SSam Leffler return error;
67588768458SSam Leffler bad:
676fcf59617SAndrey V. Elsukov if (sav != NULL)
677fcf59617SAndrey V. Elsukov key_freesav(&sav);
67888768458SSam Leffler if (m != NULL)
67988768458SSam Leffler m_freem(m);
680fcf59617SAndrey V. Elsukov if (xd != NULL)
68135d9e00dSJohn Baldwin free(xd, M_ESP);
6824d36d1fdSMarcin Wojtas if (crp != NULL) {
68335d9e00dSJohn Baldwin free(crp->crp_aad, M_ESP);
68488768458SSam Leffler crypto_freereq(crp);
6854d36d1fdSMarcin Wojtas }
68691c35dd7SMateusz Guzik CURVNET_RESTORE();
68788768458SSam Leffler return error;
68888768458SSam Leffler }
68988768458SSam Leffler /*
690fcf59617SAndrey V. Elsukov * ESP output routine, called by ipsec[46]_perform_request().
69188768458SSam Leffler */
69288768458SSam Leffler static int
693fcf59617SAndrey V. Elsukov esp_output(struct mbuf *m, struct secpolicy *sp, struct secasvar *sav,
694fcf59617SAndrey V. Elsukov u_int idx, int skip, int protoff)
69588768458SSam Leffler {
6967f1f6591SAndrey V. Elsukov IPSEC_DEBUG_DECLARE(char buf[IPSEC_ADDRSTRLEN]);
69788768458SSam Leffler struct cryptop *crp;
698fcf59617SAndrey V. Elsukov const struct auth_hash *esph;
699fcf59617SAndrey V. Elsukov const struct enc_xform *espx;
700fcf59617SAndrey V. Elsukov struct mbuf *mo = NULL;
701fcf59617SAndrey V. Elsukov struct xform_data *xd;
702fcf59617SAndrey V. Elsukov struct secasindex *saidx;
703fcf59617SAndrey V. Elsukov unsigned char *pad;
704fcf59617SAndrey V. Elsukov uint8_t *ivp;
7052e08e39fSConrad Meyer uint64_t cntr;
7062e08e39fSConrad Meyer crypto_session_t cryptoid;
707fcf59617SAndrey V. Elsukov int hlen, rlen, padding, blks, alen, i, roff;
708fcf59617SAndrey V. Elsukov int error, maxpacketsize;
709fcf59617SAndrey V. Elsukov uint8_t prot;
7104d36d1fdSMarcin Wojtas uint32_t seqh;
7114d36d1fdSMarcin Wojtas const struct crypto_session_params *csp;
71288768458SSam Leffler
7130361f165SKristof Provost SECASVAR_RLOCK_TRACKER;
7140361f165SKristof Provost
7159ffa9677SSam Leffler IPSEC_ASSERT(sav != NULL, ("null SA"));
71688768458SSam Leffler esph = sav->tdb_authalgxform;
71788768458SSam Leffler espx = sav->tdb_encalgxform;
7189ffa9677SSam Leffler IPSEC_ASSERT(espx != NULL, ("null encoding xform"));
71988768458SSam Leffler
72088768458SSam Leffler if (sav->flags & SADB_X_EXT_OLD)
72188768458SSam Leffler hlen = sizeof (struct esp) + sav->ivlen;
72288768458SSam Leffler else
72388768458SSam Leffler hlen = sizeof (struct newesp) + sav->ivlen;
72488768458SSam Leffler
72588768458SSam Leffler rlen = m->m_pkthdr.len - skip; /* Raw payload length. */
72688768458SSam Leffler /*
727a2bc81bfSJohn-Mark Gurney * RFC4303 2.4 Requires 4 byte alignment.
728b01edfb5SMarcin Wojtas * Old versions of FreeBSD can't decrypt partial blocks encrypted
729b01edfb5SMarcin Wojtas * with AES-CTR. Align payload to native_blocksize (16 bytes)
730b01edfb5SMarcin Wojtas * in order to preserve compatibility.
73188768458SSam Leffler */
732b01edfb5SMarcin Wojtas if (SAV_ISCTR(sav) && V_esp_ctr_compatibility)
733b01edfb5SMarcin Wojtas blks = MAX(4, espx->native_blocksize); /* Cipher blocksize */
734b01edfb5SMarcin Wojtas else
735b01edfb5SMarcin Wojtas blks = MAX(4, espx->blocksize);
73688768458SSam Leffler
73788768458SSam Leffler /* XXX clamp padding length a la KAME??? */
73888768458SSam Leffler padding = ((blks - ((rlen + 2) % blks)) % blks) + 2;
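/*
 * The padded payload (rlen + padding) is always a multiple of blks,
 * and padding includes the two trailer bytes (Pad Length and Next
 * Header); e.g. rlen = 100 with blks = 16 yields padding = 12.
 */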
73988768458SSam Leffler
740a09a7146SJohn-Mark Gurney alen = xform_ah_authsize(esph);
74188768458SSam Leffler
742a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_output);
74388768458SSam Leffler
74488768458SSam Leffler saidx = &sav->sah->saidx;
74588768458SSam Leffler /* Check for maximum packet size violations. */
74688768458SSam Leffler switch (saidx->dst.sa.sa_family) {
74788768458SSam Leffler #ifdef INET
74888768458SSam Leffler case AF_INET:
74988768458SSam Leffler maxpacketsize = IP_MAXPACKET;
75088768458SSam Leffler break;
75188768458SSam Leffler #endif /* INET */
75288768458SSam Leffler #ifdef INET6
75388768458SSam Leffler case AF_INET6:
75488768458SSam Leffler maxpacketsize = IPV6_MAXPACKET;
75588768458SSam Leffler break;
75688768458SSam Leffler #endif /* INET6 */
75788768458SSam Leffler default:
7589ffa9677SSam Leffler DPRINTF(("%s: unknown/unsupported protocol "
7599ffa9677SSam Leffler "family %d, SA %s/%08lx\n", __func__,
760962ac6c7SAndrey V. Elsukov saidx->dst.sa.sa_family, ipsec_address(&saidx->dst,
761962ac6c7SAndrey V. Elsukov buf, sizeof(buf)), (u_long) ntohl(sav->spi)));
762a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_nopf);
76388768458SSam Leffler error = EPFNOSUPPORT;
76488768458SSam Leffler goto bad;
76588768458SSam Leffler }
766fcf59617SAndrey V. Elsukov /*
76716de9ac1SGeorge V. Neville-Neil DPRINTF(("%s: skip %d hlen %d rlen %d padding %d alen %d blks %d\n",
768fcf59617SAndrey V. Elsukov __func__, skip, hlen, rlen, padding, alen, blks)); */
76988768458SSam Leffler if (skip + hlen + rlen + padding + alen > maxpacketsize) {
7709ffa9677SSam Leffler DPRINTF(("%s: packet in SA %s/%08lx got too big "
7719ffa9677SSam Leffler "(len %u, max len %u)\n", __func__,
772962ac6c7SAndrey V. Elsukov ipsec_address(&saidx->dst, buf, sizeof(buf)),
773962ac6c7SAndrey V. Elsukov (u_long) ntohl(sav->spi),
77488768458SSam Leffler skip + hlen + rlen + padding + alen, maxpacketsize));
775a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_toobig);
77688768458SSam Leffler error = EMSGSIZE;
77788768458SSam Leffler goto bad;
77888768458SSam Leffler }
77988768458SSam Leffler
78088768458SSam Leffler /* Update the counters. */
781a04d64d8SAndrey V. Elsukov ESPSTAT_ADD(esps_obytes, m->m_pkthdr.len - skip);
78288768458SSam Leffler
78347e2996eSSam Leffler m = m_unshare(m, M_NOWAIT);
78488768458SSam Leffler if (m == NULL) {
7859ffa9677SSam Leffler DPRINTF(("%s: cannot clone mbuf chain, SA %s/%08lx\n", __func__,
786962ac6c7SAndrey V. Elsukov ipsec_address(&saidx->dst, buf, sizeof(buf)),
787962ac6c7SAndrey V. Elsukov (u_long) ntohl(sav->spi)));
788a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_hdrops);
78988768458SSam Leffler error = ENOBUFS;
79088768458SSam Leffler goto bad;
79188768458SSam Leffler }
79288768458SSam Leffler
79388768458SSam Leffler /* Inject ESP header. */
79488768458SSam Leffler mo = m_makespace(m, skip, hlen, &roff);
79588768458SSam Leffler if (mo == NULL) {
7969ffa9677SSam Leffler DPRINTF(("%s: %u byte ESP hdr inject failed for SA %s/%08lx\n",
797962ac6c7SAndrey V. Elsukov __func__, hlen, ipsec_address(&saidx->dst, buf,
798962ac6c7SAndrey V. Elsukov sizeof(buf)), (u_long) ntohl(sav->spi)));
799a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_hdrops); /* XXX diffs from openbsd */
80088768458SSam Leffler error = ENOBUFS;
80188768458SSam Leffler goto bad;
80288768458SSam Leffler }
80388768458SSam Leffler
80488768458SSam Leffler /* Initialize ESP header. */
805fcf59617SAndrey V. Elsukov bcopy((caddr_t) &sav->spi, mtod(mo, caddr_t) + roff,
806fcf59617SAndrey V. Elsukov sizeof(uint32_t));
8070361f165SKristof Provost SECASVAR_RLOCK(sav);
808fcf59617SAndrey V. Elsukov if (sav->replay) {
809fcf59617SAndrey V. Elsukov uint32_t replay;
810fcf59617SAndrey V. Elsukov
8110361f165SKristof Provost SECREPLAY_LOCK(sav->replay);
8126131838bSPawel Jakub Dawidek #ifdef REGRESSION
813dfa9422bSPawel Jakub Dawidek /* Emulate replay attack when ipsec_replay is TRUE. */
814603724d3SBjoern A. Zeeb if (!V_ipsec_replay)
8156131838bSPawel Jakub Dawidek #endif
816dfa9422bSPawel Jakub Dawidek sav->replay->count++;
8178b7f3994SMarcin Wojtas replay = htonl((uint32_t)sav->replay->count);
818fcf59617SAndrey V. Elsukov
819fcf59617SAndrey V. Elsukov bcopy((caddr_t) &replay, mtod(mo, caddr_t) + roff +
820fcf59617SAndrey V. Elsukov sizeof(uint32_t), sizeof(uint32_t));
8214d36d1fdSMarcin Wojtas
8224d36d1fdSMarcin Wojtas seqh = htonl((uint32_t)(sav->replay->count >> IPSEC_SEQH_SHIFT));
8230361f165SKristof Provost SECREPLAY_UNLOCK(sav->replay);
82488768458SSam Leffler }
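/*
 * Only the low-order 32 bits of the sequence counter go on the
 * wire; with ESN the high-order bits saved in seqh are used solely
 * for the ICV/AAD computation (RFC 4303, Appendix A).
 */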
825fcf59617SAndrey V. Elsukov cryptoid = sav->tdb_cryptoid;
8269f8f3a8eSKristof Provost if (SAV_ISCTRORGCM(sav) || SAV_ISCHACHA(sav))
827fcf59617SAndrey V. Elsukov cntr = sav->cntr++;
8280361f165SKristof Provost SECASVAR_RUNLOCK(sav);
82988768458SSam Leffler
83088768458SSam Leffler /*
83188768458SSam Leffler * Add padding -- better to do it ourselves than use the crypto engine,
83288768458SSam Leffler * although if/when we support compression, we'd have to do that.
83388768458SSam Leffler */
83488768458SSam Leffler pad = (u_char *) m_pad(m, padding + alen);
83588768458SSam Leffler if (pad == NULL) {
8369ffa9677SSam Leffler DPRINTF(("%s: m_pad failed for SA %s/%08lx\n", __func__,
837962ac6c7SAndrey V. Elsukov ipsec_address(&saidx->dst, buf, sizeof(buf)),
838962ac6c7SAndrey V. Elsukov (u_long) ntohl(sav->spi)));
83988768458SSam Leffler m = NULL; /* NB: free'd by m_pad */
84088768458SSam Leffler error = ENOBUFS;
84188768458SSam Leffler goto bad;
84288768458SSam Leffler }
84388768458SSam Leffler
84488768458SSam Leffler /*
84588768458SSam Leffler * Add padding: random, zero, or self-describing.
84688768458SSam Leffler * XXX catch unexpected setting
84788768458SSam Leffler */
84888768458SSam Leffler switch (sav->flags & SADB_X_EXT_PMASK) {
84988768458SSam Leffler case SADB_X_EXT_PRAND:
850a8a16c71SConrad Meyer arc4random_buf(pad, padding - 2);
85188768458SSam Leffler break;
85288768458SSam Leffler case SADB_X_EXT_PZERO:
85388768458SSam Leffler bzero(pad, padding - 2);
85488768458SSam Leffler break;
85588768458SSam Leffler case SADB_X_EXT_PSEQ:
85688768458SSam Leffler for (i = 0; i < padding - 2; i++)
85788768458SSam Leffler pad[i] = i+1;
85888768458SSam Leffler break;
85988768458SSam Leffler }
86088768458SSam Leffler
86188768458SSam Leffler /* Fix padding length and Next Protocol in padding itself. */
86288768458SSam Leffler pad[padding - 2] = padding - 2;
86388768458SSam Leffler m_copydata(m, protoff, sizeof(u_int8_t), pad + padding - 1);
86488768458SSam Leffler
86588768458SSam Leffler /* Fix Next Protocol in IPv4/IPv6 header. */
86688768458SSam Leffler prot = IPPROTO_ESP;
86788768458SSam Leffler m_copyback(m, protoff, sizeof(u_int8_t), (u_char *) &prot);
86888768458SSam Leffler
869c0341432SJohn Baldwin /* Get crypto descriptor. */
870c0341432SJohn Baldwin crp = crypto_getreq(cryptoid, M_NOWAIT);
87188768458SSam Leffler if (crp == NULL) {
872c0341432SJohn Baldwin DPRINTF(("%s: failed to acquire crypto descriptor\n",
8739ffa9677SSam Leffler __func__));
874a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_crypto);
87588768458SSam Leffler error = ENOBUFS;
87688768458SSam Leffler goto bad;
87788768458SSam Leffler }
87888768458SSam Leffler
879a2bc81bfSJohn-Mark Gurney /* IPsec-specific opaque crypto info. */
88035d9e00dSJohn Baldwin xd = malloc(sizeof(struct xform_data), M_ESP, M_NOWAIT | M_ZERO);
881fcf59617SAndrey V. Elsukov if (xd == NULL) {
882fcf59617SAndrey V. Elsukov DPRINTF(("%s: failed to allocate xform_data\n", __func__));
8834d36d1fdSMarcin Wojtas goto xd_fail;
884a2bc81bfSJohn-Mark Gurney }
885a2bc81bfSJohn-Mark Gurney
88688768458SSam Leffler /* Encryption descriptor. */
887c0341432SJohn Baldwin crp->crp_payload_start = skip + hlen;
888c0341432SJohn Baldwin crp->crp_payload_length = m->m_pkthdr.len - (skip + hlen + alen);
889c0341432SJohn Baldwin crp->crp_op = CRYPTO_OP_ENCRYPT;
89088768458SSam Leffler
891c161c46dSJohn Baldwin /* Generate cipher and ESP IVs. */
892c0341432SJohn Baldwin ivp = &crp->crp_iv[0];
8939f8f3a8eSKristof Provost if (SAV_ISCTRORGCM(sav) || SAV_ISCHACHA(sav)) {
894c161c46dSJohn Baldwin /*
895c161c46dSJohn Baldwin * See comment in esp_input() for details on the
896c161c46dSJohn Baldwin * cipher IV. A simple per-SA counter stored in
897c161c46dSJohn Baldwin * 'cntr' is used as the explicit ESP IV.
898c161c46dSJohn Baldwin */
899a2bc81bfSJohn-Mark Gurney memcpy(ivp, sav->key_enc->key_data +
900a2bc81bfSJohn-Mark Gurney _KEYLEN(sav->key_enc) - 4, 4);
901a2bc81bfSJohn-Mark Gurney be64enc(&ivp[4], cntr);
902a2bc81bfSJohn-Mark Gurney if (SAV_ISCTR(sav)) {
903a2bc81bfSJohn-Mark Gurney be32enc(&ivp[sav->ivlen + 4], 1);
904a2bc81bfSJohn-Mark Gurney }
905a2bc81bfSJohn-Mark Gurney m_copyback(m, skip + hlen - sav->ivlen, sav->ivlen, &ivp[4]);
906c0341432SJohn Baldwin crp->crp_flags |= CRYPTO_F_IV_SEPARATE;
907c0341432SJohn Baldwin } else if (sav->ivlen != 0) {
9088cbde414SJohn Baldwin arc4rand(ivp, sav->ivlen, 0);
909c0341432SJohn Baldwin crp->crp_iv_start = skip + hlen - sav->ivlen;
9108cbde414SJohn Baldwin m_copyback(m, crp->crp_iv_start, sav->ivlen, ivp);
91188768458SSam Leffler }
91288768458SSam Leffler
91388768458SSam Leffler /* Callback parameters */
914fcf59617SAndrey V. Elsukov xd->sp = sp;
915fcf59617SAndrey V. Elsukov xd->sav = sav;
916fcf59617SAndrey V. Elsukov xd->idx = idx;
917fcf59617SAndrey V. Elsukov xd->cryptoid = cryptoid;
918fd40ecf3SJohn Baldwin xd->vnet = curvnet;
91988768458SSam Leffler
92088768458SSam Leffler /* Crypto operation descriptor. */
921c0341432SJohn Baldwin crp->crp_flags |= CRYPTO_F_CBIFSYNC;
9229c0e3d3aSJohn Baldwin crypto_use_mbuf(crp, m);
92388768458SSam Leffler crp->crp_callback = esp_output_cb;
924c0341432SJohn Baldwin crp->crp_opaque = xd;
92588768458SSam Leffler
92688768458SSam Leffler if (esph) {
92788768458SSam Leffler /* Authentication descriptor. */
928c0341432SJohn Baldwin crp->crp_op |= CRYPTO_OP_COMPUTE_DIGEST;
9299f8f3a8eSKristof Provost if (SAV_ISGCM(sav) || SAV_ISCHACHA(sav))
930c0341432SJohn Baldwin crp->crp_aad_length = 8; /* RFC4106 5, SPI + SN */
93116de9ac1SGeorge V. Neville-Neil else
932c0341432SJohn Baldwin crp->crp_aad_length = hlen;
9334d36d1fdSMarcin Wojtas
9344d36d1fdSMarcin Wojtas csp = crypto_get_params(crp->crp_session);
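/*
 * Mirror the AAD construction done in esp_input(): a separate
 * buffer holding SPI || ESN high-order 32 bits || SN, since the
 * high-order bits are never transmitted.
 */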
9354d36d1fdSMarcin Wojtas if (csp->csp_flags & CSP_F_SEPARATE_AAD &&
9364d36d1fdSMarcin Wojtas sav->replay != NULL) {
9374d36d1fdSMarcin Wojtas int aad_skip;
9384d36d1fdSMarcin Wojtas
9394d36d1fdSMarcin Wojtas crp->crp_aad_length += sizeof(seqh);
94035d9e00dSJohn Baldwin crp->crp_aad = malloc(crp->crp_aad_length, M_ESP, M_NOWAIT);
9414d36d1fdSMarcin Wojtas if (crp->crp_aad == NULL) {
9424d36d1fdSMarcin Wojtas DPRINTF(("%s: failed to allocate AAD buffer\n",
9434d36d1fdSMarcin Wojtas __func__));
9444d36d1fdSMarcin Wojtas goto crp_aad_fail;
9454d36d1fdSMarcin Wojtas }
9464d36d1fdSMarcin Wojtas
9474d36d1fdSMarcin Wojtas /* SPI */
9484d36d1fdSMarcin Wojtas m_copydata(m, skip, SPI_SIZE, crp->crp_aad);
9494d36d1fdSMarcin Wojtas aad_skip = SPI_SIZE;
9504d36d1fdSMarcin Wojtas
9514d36d1fdSMarcin Wojtas /* ESN */
9524d36d1fdSMarcin Wojtas bcopy(&seqh, (char *)crp->crp_aad + aad_skip, sizeof(seqh));
9534d36d1fdSMarcin Wojtas aad_skip += sizeof(seqh);
9544d36d1fdSMarcin Wojtas
9554d36d1fdSMarcin Wojtas /* Rest of aad */
9564d36d1fdSMarcin Wojtas if (crp->crp_aad_length - aad_skip > 0)
9574d36d1fdSMarcin Wojtas m_copydata(m, skip + SPI_SIZE,
9584d36d1fdSMarcin Wojtas crp->crp_aad_length - aad_skip,
9594d36d1fdSMarcin Wojtas (char *)crp->crp_aad + aad_skip);
9604d36d1fdSMarcin Wojtas } else
9614d36d1fdSMarcin Wojtas crp->crp_aad_start = skip;
9624d36d1fdSMarcin Wojtas
9634d36d1fdSMarcin Wojtas if (csp->csp_flags & CSP_F_ESN && sav->replay != NULL)
9644d36d1fdSMarcin Wojtas memcpy(crp->crp_esn, &seqh, sizeof(seqh));
9654d36d1fdSMarcin Wojtas
966c0341432SJohn Baldwin crp->crp_digest_start = m->m_pkthdr.len - alen;
96716de9ac1SGeorge V. Neville-Neil }
96816de9ac1SGeorge V. Neville-Neil
96968f6800cSMark Johnston if (V_async_crypto)
97068f6800cSMark Johnston return (crypto_dispatch_async(crp, CRYPTO_ASYNC_ORDERED));
97168f6800cSMark Johnston else
97268f6800cSMark Johnston return (crypto_dispatch(crp));
9734d36d1fdSMarcin Wojtas
9744d36d1fdSMarcin Wojtas crp_aad_fail:
97535d9e00dSJohn Baldwin free(xd, M_ESP);
9764d36d1fdSMarcin Wojtas xd_fail:
9774d36d1fdSMarcin Wojtas crypto_freereq(crp);
9784d36d1fdSMarcin Wojtas ESPSTAT_INC(esps_crypto);
9794d36d1fdSMarcin Wojtas error = ENOBUFS;
98088768458SSam Leffler bad:
98188768458SSam Leffler if (m)
98288768458SSam Leffler m_freem(m);
9833aee7099SAndrey V. Elsukov key_freesav(&sav);
9843aee7099SAndrey V. Elsukov key_freesp(&sp);
98588768458SSam Leffler return (error);
98688768458SSam Leffler }
98788768458SSam Leffler /*
98888768458SSam Leffler * ESP output callback from the crypto driver.
98988768458SSam Leffler */
99088768458SSam Leffler static int
99188768458SSam Leffler esp_output_cb(struct cryptop *crp)
99288768458SSam Leffler {
993fcf59617SAndrey V. Elsukov struct xform_data *xd;
994fcf59617SAndrey V. Elsukov struct secpolicy *sp;
99588768458SSam Leffler struct secasvar *sav;
99688768458SSam Leffler struct mbuf *m;
9972e08e39fSConrad Meyer crypto_session_t cryptoid;
998fcf59617SAndrey V. Elsukov u_int idx;
9990e4fb1dbSPawel Jakub Dawidek int error;
100088768458SSam Leffler
1001fcf59617SAndrey V. Elsukov xd = (struct xform_data *) crp->crp_opaque;
1002fd40ecf3SJohn Baldwin CURVNET_SET(xd->vnet);
10039c0e3d3aSJohn Baldwin m = crp->crp_buf.cb_mbuf;
1004fcf59617SAndrey V. Elsukov sp = xd->sp;
1005fcf59617SAndrey V. Elsukov sav = xd->sav;
1006fcf59617SAndrey V. Elsukov idx = xd->idx;
1007fcf59617SAndrey V. Elsukov cryptoid = xd->cryptoid;
100888768458SSam Leffler
100988768458SSam Leffler /* Check for crypto errors. */
101088768458SSam Leffler if (crp->crp_etype) {
101188768458SSam Leffler if (crp->crp_etype == EAGAIN) {
1012fcf59617SAndrey V. Elsukov /* Reset the session ID */
10131b0909d5SConrad Meyer if (ipsec_updateid(sav, &crp->crp_session, &cryptoid) != 0)
1014fcf59617SAndrey V. Elsukov crypto_freesession(cryptoid);
10151b0909d5SConrad Meyer xd->cryptoid = crp->crp_session;
1016fd40ecf3SJohn Baldwin CURVNET_RESTORE();
10170a95a08eSPawel Jakub Dawidek return (crypto_dispatch(crp));
101888768458SSam Leffler }
1019a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_noxform);
10209ffa9677SSam Leffler DPRINTF(("%s: crypto error %d\n", __func__, crp->crp_etype));
102188768458SSam Leffler error = crp->crp_etype;
1022fcf59617SAndrey V. Elsukov m_freem(m);
102388768458SSam Leffler goto bad;
102488768458SSam Leffler }
102588768458SSam Leffler
102688768458SSam Leffler /* Shouldn't happen... */
102788768458SSam Leffler if (m == NULL) {
1028a04d64d8SAndrey V. Elsukov ESPSTAT_INC(esps_crypto);
10299ffa9677SSam Leffler DPRINTF(("%s: bogus returned buffer from crypto\n", __func__));
103088768458SSam Leffler error = EINVAL;
103188768458SSam Leffler goto bad;
103288768458SSam Leffler }
103335d9e00dSJohn Baldwin free(xd, M_ESP);
103435d9e00dSJohn Baldwin free(crp->crp_aad, M_ESP);
1035fcf59617SAndrey V. Elsukov crypto_freereq(crp);
1036*b1c3a4d7SKristof Provost ESPSTAT_INC2(esps_hist, sav->alg_enc);
103788768458SSam Leffler if (sav->tdb_authalgxform != NULL)
1038*b1c3a4d7SKristof Provost AHSTAT_INC2(ahs_hist, sav->alg_auth);
103988768458SSam Leffler
10406131838bSPawel Jakub Dawidek #ifdef REGRESSION
1041dfa9422bSPawel Jakub Dawidek /* Emulate man-in-the-middle attack when ipsec_integrity is TRUE. */
1042603724d3SBjoern A. Zeeb if (V_ipsec_integrity) {
1043442da28aSVANHULLEBUS Yvan static unsigned char ipseczeroes[AH_HMAC_MAXHASHLEN];
1044fcf59617SAndrey V. Elsukov const struct auth_hash *esph;
1045dfa9422bSPawel Jakub Dawidek
1046dfa9422bSPawel Jakub Dawidek /*
1047dfa9422bSPawel Jakub Dawidek * Corrupt HMAC if we want to test integrity verification of
1048dfa9422bSPawel Jakub Dawidek * the other side.
1049dfa9422bSPawel Jakub Dawidek */
1050dfa9422bSPawel Jakub Dawidek esph = sav->tdb_authalgxform;
1051dfa9422bSPawel Jakub Dawidek if (esph != NULL) {
1052442da28aSVANHULLEBUS Yvan int alen;
1053442da28aSVANHULLEBUS Yvan
1054a09a7146SJohn-Mark Gurney alen = xform_ah_authsize(esph);
1055442da28aSVANHULLEBUS Yvan m_copyback(m, m->m_pkthdr.len - alen,
1056442da28aSVANHULLEBUS Yvan alen, ipseczeroes);
1057dfa9422bSPawel Jakub Dawidek }
1058dfa9422bSPawel Jakub Dawidek }
10596131838bSPawel Jakub Dawidek #endif
1060dfa9422bSPawel Jakub Dawidek
106188768458SSam Leffler /* NB: m is reclaimed by ipsec_process_done. */
1062fcf59617SAndrey V. Elsukov error = ipsec_process_done(m, sp, sav, idx);
1063fd40ecf3SJohn Baldwin CURVNET_RESTORE();
10643d80e82dSAndrey V. Elsukov return (error);
106588768458SSam Leffler bad:
106635d9e00dSJohn Baldwin free(xd, M_ESP);
106735d9e00dSJohn Baldwin free(crp->crp_aad, M_ESP);
106888768458SSam Leffler crypto_freereq(crp);
1069fcf59617SAndrey V. Elsukov key_freesav(&sav);
1070fcf59617SAndrey V. Elsukov key_freesp(&sp);
107191c35dd7SMateusz Guzik CURVNET_RESTORE();
10723d80e82dSAndrey V. Elsukov return (error);
107388768458SSam Leffler }
107488768458SSam Leffler
107588768458SSam Leffler static struct xformsw esp_xformsw = {
1076fcf59617SAndrey V. Elsukov .xf_type = XF_ESP,
1077fcf59617SAndrey V. Elsukov .xf_name = "IPsec ESP",
1078fcf59617SAndrey V. Elsukov .xf_init = esp_init,
1079dae61c9dSJohn Baldwin .xf_cleanup = esp_cleanup,
1080fcf59617SAndrey V. Elsukov .xf_input = esp_input,
1081fcf59617SAndrey V. Elsukov .xf_output = esp_output,
108288768458SSam Leffler };
108388768458SSam Leffler
1084fcf59617SAndrey V. Elsukov SYSINIT(esp_xform_init, SI_SUB_PROTO_DOMAIN, SI_ORDER_MIDDLE,
1085fcf59617SAndrey V. Elsukov xform_attach, &esp_xformsw);
1086fcf59617SAndrey V. Elsukov SYSUNINIT(esp_xform_uninit, SI_SUB_PROTO_DOMAIN, SI_ORDER_MIDDLE,
1087fcf59617SAndrey V. Elsukov xform_detach, &esp_xformsw);
1088