/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Netflix Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/counter.h>
#include <sys/endian.h>
#include <sys/kernel.h>
#include <sys/ktls.h>
#include <sys/lock.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/mutex.h>
#include <sys/sysctl.h>
#include <sys/uio.h>
#include <vm/vm.h>
#include <vm/pmap.h>
#include <vm/vm_param.h>
#include <opencrypto/cryptodev.h>
#include <opencrypto/ktls.h>

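/*
 * Per-session OCF state.  "sid" is the cipher (or AEAD) session and
 * "mac_sid" is the separate HMAC session used only by the CBC
 * MAC-then-encrypt suites.  The lock serializes the sleep/wakeup
 * handshake for synchronous dispatches and, under INVARIANTS, the
 * TLS 1.0 implicit-IV bookkeeping.
 */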
struct ktls_ocf_session {
	crypto_session_t sid;
	crypto_session_t mac_sid;
	struct mtx lock;
	int mac_len;
	bool implicit_iv;

	/* Only used for TLS 1.0 with the implicit IV. */
#ifdef INVARIANTS
	bool in_progress;
	uint64_t next_seqno;
#endif
	char iv[AES_BLOCK_LEN];
};

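/*
 * Completion tracker handed to the OCF callback when a request is
 * dispatched synchronously to an asynchronous driver; the dispatching
 * thread sleeps on it until the callback marks it done.
 */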
struct ocf_operation {
	struct ktls_ocf_session *os;
	bool done;
};

static MALLOC_DEFINE(M_KTLS_OCF, "ktls_ocf", "OCF KTLS");

SYSCTL_DECL(_kern_ipc_tls);
SYSCTL_DECL(_kern_ipc_tls_stats);

static SYSCTL_NODE(_kern_ipc_tls_stats, OID_AUTO, ocf,
    CTLFLAG_RD | CTLFLAG_MPSAFE, 0,
    "Kernel TLS offload via OCF stats");

static COUNTER_U64_DEFINE_EARLY(ocf_tls10_cbc_encrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls10_cbc_encrypts,
    CTLFLAG_RD, &ocf_tls10_cbc_encrypts,
    "Total number of OCF TLS 1.0 CBC encryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls11_cbc_encrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls11_cbc_encrypts,
    CTLFLAG_RD, &ocf_tls11_cbc_encrypts,
    "Total number of OCF TLS 1.1/1.2 CBC encryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls12_gcm_decrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_gcm_decrypts,
    CTLFLAG_RD, &ocf_tls12_gcm_decrypts,
    "Total number of OCF TLS 1.2 GCM decryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls12_gcm_encrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_gcm_encrypts,
    CTLFLAG_RD, &ocf_tls12_gcm_encrypts,
    "Total number of OCF TLS 1.2 GCM encryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls12_chacha20_decrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_chacha20_decrypts,
    CTLFLAG_RD, &ocf_tls12_chacha20_decrypts,
    "Total number of OCF TLS 1.2 Chacha20-Poly1305 decryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls12_chacha20_encrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls12_chacha20_encrypts,
    CTLFLAG_RD, &ocf_tls12_chacha20_encrypts,
    "Total number of OCF TLS 1.2 Chacha20-Poly1305 encryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls13_gcm_decrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_gcm_decrypts,
    CTLFLAG_RD, &ocf_tls13_gcm_decrypts,
    "Total number of OCF TLS 1.3 GCM decryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls13_gcm_encrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_gcm_encrypts,
    CTLFLAG_RD, &ocf_tls13_gcm_encrypts,
    "Total number of OCF TLS 1.3 GCM encryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls13_chacha20_decrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_chacha20_decrypts,
    CTLFLAG_RD, &ocf_tls13_chacha20_decrypts,
    "Total number of OCF TLS 1.3 Chacha20-Poly1305 decryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_tls13_chacha20_encrypts);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, tls13_chacha20_encrypts,
    CTLFLAG_RD, &ocf_tls13_chacha20_encrypts,
    "Total number of OCF TLS 1.3 Chacha20-Poly1305 encryption operations");

static COUNTER_U64_DEFINE_EARLY(ocf_inplace);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, inplace,
    CTLFLAG_RD, &ocf_inplace,
    "Total number of OCF in-place operations");

static COUNTER_U64_DEFINE_EARLY(ocf_separate_output);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, separate_output,
    CTLFLAG_RD, &ocf_separate_output,
    "Total number of OCF operations with a separate output buffer");

static COUNTER_U64_DEFINE_EARLY(ocf_retries);
SYSCTL_COUNTER_U64(_kern_ipc_tls_stats_ocf, OID_AUTO, retries, CTLFLAG_RD,
    &ocf_retries,
    "Number of OCF encryption operation retries");

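/*
 * OCF completion callbacks.  Synchronous drivers complete the request
 * before crypto_dispatch() returns, so the sync callback has nothing to
 * do.  Asynchronous drivers invoke the async callback from another
 * context; it marks the operation done and wakes the dispatching thread.
 */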
static int
ktls_ocf_callback_sync(struct cryptop *crp __unused)
{
	return (0);
}

static int
ktls_ocf_callback_async(struct cryptop *crp)
{
	struct ocf_operation *oo;

	oo = crp->crp_opaque;
	mtx_lock(&oo->os->lock);
	oo->done = true;
	mtx_unlock(&oo->os->lock);
	wakeup(oo);
	return (0);
}

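/*
 * Dispatch a request and wait for it to complete, sleeping if the
 * backing driver is asynchronous.  Requests that complete with EAGAIN
 * (e.g. because the session was migrated to a different driver) are
 * reset and redispatched.
 */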
static int
ktls_ocf_dispatch(struct ktls_ocf_session *os, struct cryptop *crp)
{
	struct ocf_operation oo;
	int error;
	bool async;

	oo.os = os;
	oo.done = false;

	crp->crp_opaque = &oo;
	for (;;) {
		async = !CRYPTO_SESS_SYNC(crp->crp_session);
		crp->crp_callback = async ? ktls_ocf_callback_async :
		    ktls_ocf_callback_sync;

		error = crypto_dispatch(crp);
		if (error)
			break;
		if (async) {
			mtx_lock(&os->lock);
			while (!oo.done)
				mtx_sleep(&oo, &os->lock, 0, "ocfktls", 0);
			mtx_unlock(&os->lock);
		}

		if (crp->crp_etype != EAGAIN) {
			error = crp->crp_etype;
			break;
		}

		crp->crp_etype = 0;
		crp->crp_flags &= ~CRYPTO_F_DONE;
		oo.done = false;
		counter_u64_add(ocf_retries, 1);
	}
	return (error);
}

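/*
 * Asynchronous dispatch used for encryption when the session does not
 * require synchronous dispatch: the completion callback redispatches
 * EAGAIN internally and otherwise hands the result to ktls_encrypt_cb()
 * instead of blocking the caller.
 */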
static int
ktls_ocf_dispatch_async_cb(struct cryptop *crp)
{
	struct ktls_ocf_encrypt_state *state;
	int error;

	state = crp->crp_opaque;
	if (crp->crp_etype == EAGAIN) {
		crp->crp_etype = 0;
		crp->crp_flags &= ~CRYPTO_F_DONE;
		counter_u64_add(ocf_retries, 1);
		error = crypto_dispatch(crp);
		if (error != 0) {
			crypto_destroyreq(crp);
			ktls_encrypt_cb(state, error);
		}
		return (0);
	}

	error = crp->crp_etype;
	crypto_destroyreq(crp);
	ktls_encrypt_cb(state, error);
	return (0);
}

static int
ktls_ocf_dispatch_async(struct ktls_ocf_encrypt_state *state,
    struct cryptop *crp)
{
	int error;

	crp->crp_opaque = state;
	crp->crp_callback = ktls_ocf_dispatch_async_cb;
	error = crypto_dispatch(crp);
	if (error != 0)
		crypto_destroyreq(crp);
	return (error);
}

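/*
 * Encrypt a TLS 1.0-1.2 CBC (MAC-then-encrypt) record: first compute the
 * HMAC over the MAC pseudo-header and payload, then append the CBC
 * padding, and finally encrypt the payload, MAC, and padding.  TLS 1.0
 * chains the CBC IV from record to record, so those requests must
 * complete in sequence-number order.
 */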
static int
ktls_ocf_tls_cbc_encrypt(struct ktls_ocf_encrypt_state *state,
    struct ktls_session *tls, struct mbuf *m, struct iovec *outiov,
    int outiovcnt)
{
	const struct tls_record_layer *hdr;
	struct uio *uio;
	struct tls_mac_data *ad;
	struct cryptop *crp;
	struct ktls_ocf_session *os;
	struct iovec iov[m->m_epg_npgs + 2];
	u_int pgoff;
	int i, error;
	uint16_t tls_comp_len;
	uint8_t pad;

	MPASS(outiovcnt + 1 <= nitems(iov));

	os = tls->ocf_session;
	hdr = (const struct tls_record_layer *)m->m_epg_hdr;
	crp = &state->crp;
	uio = &state->uio;
	MPASS(tls->sync_dispatch);

#ifdef INVARIANTS
	if (os->implicit_iv) {
		mtx_lock(&os->lock);
		KASSERT(!os->in_progress,
		    ("concurrent implicit IV encryptions"));
		if (os->next_seqno != m->m_epg_seqno) {
			printf("KTLS CBC: TLS records out of order.  "
			    "Expected %ju, got %ju\n",
			    (uintmax_t)os->next_seqno,
			    (uintmax_t)m->m_epg_seqno);
			mtx_unlock(&os->lock);
			return (EINVAL);
		}
		os->in_progress = true;
		mtx_unlock(&os->lock);
	}
#endif

	/* Payload length. */
	tls_comp_len = m->m_len - (m->m_epg_hdrlen + m->m_epg_trllen);

	/* Initialize the AAD. */
	ad = &state->mac;
	ad->seq = htobe64(m->m_epg_seqno);
	ad->type = hdr->tls_type;
	ad->tls_vmajor = hdr->tls_vmajor;
	ad->tls_vminor = hdr->tls_vminor;
	ad->tls_length = htons(tls_comp_len);

	/* First, compute the MAC. */
	iov[0].iov_base = ad;
	iov[0].iov_len = sizeof(*ad);
	pgoff = m->m_epg_1st_off;
	for (i = 0; i < m->m_epg_npgs; i++, pgoff = 0) {
		iov[i + 1].iov_base = (void *)PHYS_TO_DMAP(m->m_epg_pa[i] +
		    pgoff);
		iov[i + 1].iov_len = m_epg_pagelen(m, i, pgoff);
	}
	iov[m->m_epg_npgs + 1].iov_base = m->m_epg_trail;
	iov[m->m_epg_npgs + 1].iov_len = os->mac_len;
	uio->uio_iov = iov;
	uio->uio_iovcnt = m->m_epg_npgs + 2;
	uio->uio_offset = 0;
	uio->uio_segflg = UIO_SYSSPACE;
	uio->uio_td = curthread;
	uio->uio_resid = sizeof(*ad) + tls_comp_len + os->mac_len;

	crypto_initreq(crp, os->mac_sid);
	crp->crp_payload_start = 0;
	crp->crp_payload_length = sizeof(*ad) + tls_comp_len;
	crp->crp_digest_start = crp->crp_payload_length;
	crp->crp_op = CRYPTO_OP_COMPUTE_DIGEST;
	crp->crp_flags = CRYPTO_F_CBIMM;
	crypto_use_uio(crp, uio);
	error = ktls_ocf_dispatch(os, crp);

	crypto_destroyreq(crp);
	if (error) {
#ifdef INVARIANTS
		if (os->implicit_iv) {
			mtx_lock(&os->lock);
			os->in_progress = false;
			mtx_unlock(&os->lock);
		}
#endif
		return (error);
	}

	/* Second, add the padding. */
	pad = m->m_epg_trllen - os->mac_len - 1;
	for (i = 0; i < pad + 1; i++)
		m->m_epg_trail[os->mac_len + i] = pad;

	/* Finally, encrypt the record. */
	crypto_initreq(crp, os->sid);
	crp->crp_payload_start = m->m_epg_hdrlen;
	crp->crp_payload_length = tls_comp_len + m->m_epg_trllen;
	KASSERT(crp->crp_payload_length % AES_BLOCK_LEN == 0,
	    ("invalid encryption size"));
	crypto_use_single_mbuf(crp, m);
	crp->crp_op = CRYPTO_OP_ENCRYPT;
	crp->crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
	if (os->implicit_iv)
		memcpy(crp->crp_iv, os->iv, AES_BLOCK_LEN);
	else
		memcpy(crp->crp_iv, hdr + 1, AES_BLOCK_LEN);

	if (outiov != NULL) {
		uio->uio_iov = outiov;
		uio->uio_iovcnt = outiovcnt;
		uio->uio_offset = 0;
		uio->uio_segflg = UIO_SYSSPACE;
		uio->uio_td = curthread;
		uio->uio_resid = crp->crp_payload_length;
		crypto_use_output_uio(crp, uio);
	}

	if (os->implicit_iv)
		counter_u64_add(ocf_tls10_cbc_encrypts, 1);
	else
		counter_u64_add(ocf_tls11_cbc_encrypts, 1);
	if (outiov != NULL)
		counter_u64_add(ocf_separate_output, 1);
	else
		counter_u64_add(ocf_inplace, 1);
	error = ktls_ocf_dispatch(os, crp);

	crypto_destroyreq(crp);

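	/*
	 * For TLS 1.0 the last cipher block of this record becomes the
	 * implicit IV for the next record.
	 */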
	if (os->implicit_iv) {
		KASSERT(os->mac_len + pad + 1 >= AES_BLOCK_LEN,
		    ("trailer too short to read IV"));
		memcpy(os->iv, m->m_epg_trail + m->m_epg_trllen - AES_BLOCK_LEN,
		    AES_BLOCK_LEN);
#ifdef INVARIANTS
		mtx_lock(&os->lock);
		os->next_seqno = m->m_epg_seqno + 1;
		os->in_progress = false;
		mtx_unlock(&os->lock);
#endif
	}
	return (error);
}

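/*
 * Encrypt a TLS 1.2 AEAD record.  For AES-GCM the nonce is the 4-byte
 * implicit salt followed by the 8-byte explicit nonce carried after the
 * record header; Chacha20-Poly1305 XORs the sequence number into the
 * 12-byte IV as in TLS 1.3.  The AAD is the TLS 1.2 pseudo-header
 * covering the sequence number and record header.
 */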
static int
ktls_ocf_tls12_aead_encrypt(struct ktls_ocf_encrypt_state *state,
    struct ktls_session *tls, struct mbuf *m, struct iovec *outiov,
    int outiovcnt)
{
	const struct tls_record_layer *hdr;
	struct uio *uio;
	struct tls_aead_data *ad;
	struct cryptop *crp;
	struct ktls_ocf_session *os;
	int error;
	uint16_t tls_comp_len;

	os = tls->ocf_session;
	hdr = (const struct tls_record_layer *)m->m_epg_hdr;
	crp = &state->crp;
	uio = &state->uio;

	crypto_initreq(crp, os->sid);

	/* Setup the IV. */
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		memcpy(crp->crp_iv, tls->params.iv, TLS_AEAD_GCM_LEN);
		memcpy(crp->crp_iv + TLS_AEAD_GCM_LEN, hdr + 1,
		    sizeof(uint64_t));
	} else {
		/*
		 * Chacha20-Poly1305 constructs the IV for TLS 1.2
		 * identically to constructing the IV for AEAD in TLS
		 * 1.3.
		 */
		memcpy(crp->crp_iv, tls->params.iv, tls->params.iv_len);
		*(uint64_t *)(crp->crp_iv + 4) ^= htobe64(m->m_epg_seqno);
	}

	/* Setup the AAD. */
	ad = &state->aead;
	tls_comp_len = m->m_len - (m->m_epg_hdrlen + m->m_epg_trllen);
	ad->seq = htobe64(m->m_epg_seqno);
	ad->type = hdr->tls_type;
	ad->tls_vmajor = hdr->tls_vmajor;
	ad->tls_vminor = hdr->tls_vminor;
	ad->tls_length = htons(tls_comp_len);
	crp->crp_aad = ad;
	crp->crp_aad_length = sizeof(*ad);

	/* Set fields for input payload. */
	crypto_use_single_mbuf(crp, m);
	crp->crp_payload_start = m->m_epg_hdrlen;
	crp->crp_payload_length = tls_comp_len;

	if (outiov != NULL) {
		crp->crp_digest_start = crp->crp_payload_length;

		uio->uio_iov = outiov;
		uio->uio_iovcnt = outiovcnt;
		uio->uio_offset = 0;
		uio->uio_segflg = UIO_SYSSPACE;
		uio->uio_td = curthread;
		uio->uio_resid = crp->crp_payload_length + tls->params.tls_tlen;
		crypto_use_output_uio(crp, uio);
	} else
		crp->crp_digest_start = crp->crp_payload_start +
		    crp->crp_payload_length;

	crp->crp_op = CRYPTO_OP_ENCRYPT | CRYPTO_OP_COMPUTE_DIGEST;
	crp->crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		counter_u64_add(ocf_tls12_gcm_encrypts, 1);
	else
		counter_u64_add(ocf_tls12_chacha20_encrypts, 1);
	if (outiov != NULL)
		counter_u64_add(ocf_separate_output, 1);
	else
		counter_u64_add(ocf_inplace, 1);
	if (tls->sync_dispatch) {
		error = ktls_ocf_dispatch(os, crp);
		crypto_destroyreq(crp);
	} else
		error = ktls_ocf_dispatch_async(state, crp);
	return (error);
}

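/*
 * Decrypt and verify a TLS 1.2 AEAD record in place within the mbuf
 * chain.  The nonce and AAD are reconstructed from the record header and
 * the expected sequence number supplied by the caller.
 */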
static int
ktls_ocf_tls12_aead_decrypt(struct ktls_session *tls,
    const struct tls_record_layer *hdr, struct mbuf *m, uint64_t seqno,
    int *trailer_len)
{
	struct tls_aead_data ad;
	struct cryptop crp;
	struct ktls_ocf_session *os;
	int error;
	uint16_t tls_comp_len;

	os = tls->ocf_session;

	crypto_initreq(&crp, os->sid);

	/* Setup the IV. */
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		memcpy(crp.crp_iv, tls->params.iv, TLS_AEAD_GCM_LEN);
		memcpy(crp.crp_iv + TLS_AEAD_GCM_LEN, hdr + 1,
		    sizeof(uint64_t));
	} else {
		/*
		 * Chacha20-Poly1305 constructs the IV for TLS 1.2
		 * identically to constructing the IV for AEAD in TLS
		 * 1.3.
		 */
		memcpy(crp.crp_iv, tls->params.iv, tls->params.iv_len);
		*(uint64_t *)(crp.crp_iv + 4) ^= htobe64(seqno);
	}

	/* Setup the AAD. */
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		tls_comp_len = ntohs(hdr->tls_length) -
		    (AES_GMAC_HASH_LEN + sizeof(uint64_t));
	else
		tls_comp_len = ntohs(hdr->tls_length) - POLY1305_HASH_LEN;
	ad.seq = htobe64(seqno);
	ad.type = hdr->tls_type;
	ad.tls_vmajor = hdr->tls_vmajor;
	ad.tls_vminor = hdr->tls_vminor;
	ad.tls_length = htons(tls_comp_len);
	crp.crp_aad = &ad;
	crp.crp_aad_length = sizeof(ad);

	crp.crp_payload_start = tls->params.tls_hlen;
	crp.crp_payload_length = tls_comp_len;
	crp.crp_digest_start = crp.crp_payload_start + crp.crp_payload_length;

	crp.crp_op = CRYPTO_OP_DECRYPT | CRYPTO_OP_VERIFY_DIGEST;
	crp.crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
	crypto_use_mbuf(&crp, m);

	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		counter_u64_add(ocf_tls12_gcm_decrypts, 1);
	else
		counter_u64_add(ocf_tls12_chacha20_decrypts, 1);
	error = ktls_ocf_dispatch(os, &crp);

	crypto_destroyreq(&crp);
	*trailer_len = tls->params.tls_tlen;
	return (error);
}

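/*
 * Encrypt a TLS 1.3 AEAD record.  The nonce is the 12-byte IV with the
 * record sequence number XORed into its final eight bytes, the AAD is
 * the record header itself, and the real record type is appended to the
 * plaintext as the first trailer byte before the authentication tag.
 */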
static int
ktls_ocf_tls13_aead_encrypt(struct ktls_ocf_encrypt_state *state,
    struct ktls_session *tls, struct mbuf *m, struct iovec *outiov,
    int outiovcnt)
{
	const struct tls_record_layer *hdr;
	struct uio *uio;
	struct tls_aead_data_13 *ad;
	struct cryptop *crp;
	struct ktls_ocf_session *os;
	char nonce[12];
	int error;

	os = tls->ocf_session;
	hdr = (const struct tls_record_layer *)m->m_epg_hdr;
	crp = &state->crp;
	uio = &state->uio;

	crypto_initreq(crp, os->sid);

	/* Setup the nonce. */
	memcpy(nonce, tls->params.iv, tls->params.iv_len);
	*(uint64_t *)(nonce + 4) ^= htobe64(m->m_epg_seqno);

	/* Setup the AAD. */
	ad = &state->aead13;
	ad->type = hdr->tls_type;
	ad->tls_vmajor = hdr->tls_vmajor;
	ad->tls_vminor = hdr->tls_vminor;
	ad->tls_length = hdr->tls_length;
	crp->crp_aad = ad;
	crp->crp_aad_length = sizeof(*ad);

	/* Set fields for input payload. */
	crypto_use_single_mbuf(crp, m);
	crp->crp_payload_start = m->m_epg_hdrlen;
	crp->crp_payload_length = m->m_len -
	    (m->m_epg_hdrlen + m->m_epg_trllen);

	/* Store the record type as the first byte of the trailer. */
	m->m_epg_trail[0] = m->m_epg_record_type;
	crp->crp_payload_length++;

	if (outiov != NULL) {
		crp->crp_digest_start = crp->crp_payload_length;

		uio->uio_iov = outiov;
		uio->uio_iovcnt = outiovcnt;
		uio->uio_offset = 0;
		uio->uio_segflg = UIO_SYSSPACE;
		uio->uio_td = curthread;
		uio->uio_resid = m->m_len - m->m_epg_hdrlen;
		crypto_use_output_uio(crp, uio);
	} else
		crp->crp_digest_start = crp->crp_payload_start +
		    crp->crp_payload_length;

	crp->crp_op = CRYPTO_OP_ENCRYPT | CRYPTO_OP_COMPUTE_DIGEST;
	crp->crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;

	memcpy(crp->crp_iv, nonce, sizeof(nonce));

	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		counter_u64_add(ocf_tls13_gcm_encrypts, 1);
	else
		counter_u64_add(ocf_tls13_chacha20_encrypts, 1);
	if (outiov != NULL)
		counter_u64_add(ocf_separate_output, 1);
	else
		counter_u64_add(ocf_inplace, 1);
	if (tls->sync_dispatch) {
		error = ktls_ocf_dispatch(os, crp);
		crypto_destroyreq(crp);
	} else
		error = ktls_ocf_dispatch_async(state, crp);
	return (error);
}

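/*
 * Decrypt and verify a TLS 1.3 AEAD record in place.  The inner
 * record-type byte remains part of the decrypted payload, so only the
 * authentication tag length is reported via trailer_len.
 */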
static int
ktls_ocf_tls13_aead_decrypt(struct ktls_session *tls,
    const struct tls_record_layer *hdr, struct mbuf *m, uint64_t seqno,
    int *trailer_len)
{
	struct tls_aead_data_13 ad;
	struct cryptop crp;
	struct ktls_ocf_session *os;
	int error;
	u_int tag_len;

	os = tls->ocf_session;

	tag_len = tls->params.tls_tlen - 1;

	/* Payload must contain at least one byte for the record type. */
	if (ntohs(hdr->tls_length) < tag_len + 1)
		return (EBADMSG);

	crypto_initreq(&crp, os->sid);

	/* Setup the nonce. */
	memcpy(crp.crp_iv, tls->params.iv, tls->params.iv_len);
	*(uint64_t *)(crp.crp_iv + 4) ^= htobe64(seqno);

	/* Setup the AAD. */
	ad.type = hdr->tls_type;
	ad.tls_vmajor = hdr->tls_vmajor;
	ad.tls_vminor = hdr->tls_vminor;
	ad.tls_length = hdr->tls_length;
	crp.crp_aad = &ad;
	crp.crp_aad_length = sizeof(ad);

	crp.crp_payload_start = tls->params.tls_hlen;
	crp.crp_payload_length = ntohs(hdr->tls_length) - tag_len;
	crp.crp_digest_start = crp.crp_payload_start + crp.crp_payload_length;

	crp.crp_op = CRYPTO_OP_DECRYPT | CRYPTO_OP_VERIFY_DIGEST;
	crp.crp_flags = CRYPTO_F_CBIMM | CRYPTO_F_IV_SEPARATE;
	crypto_use_mbuf(&crp, m);

	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		counter_u64_add(ocf_tls13_gcm_decrypts, 1);
	else
		counter_u64_add(ocf_tls13_chacha20_decrypts, 1);
	error = ktls_ocf_dispatch(os, &crp);

	crypto_destroyreq(&crp);
	*trailer_len = tag_len;
	return (error);
}

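/*
 * Release the OCF state for a session, including the separate MAC
 * session created for the CBC MAC-then-encrypt suites.
 */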
void
ktls_ocf_free(struct ktls_session *tls)
{
	struct ktls_ocf_session *os;

	os = tls->ocf_session;
	crypto_freesession(os->sid);
	/* mac_sid is only set for CBC MAC-then-encrypt suites. */
	if (os->mac_sid != NULL)
		crypto_freesession(os->mac_sid);
	mtx_destroy(&os->lock);
	zfree(os, M_KTLS_OCF);
}

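/*
 * Probe whether OCF can handle this session: validate the cipher suite,
 * key lengths, and TLS version, create the OCF session(s), and install
 * the appropriate encrypt/decrypt callbacks on the ktls session.
 */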
int
ktls_ocf_try(struct socket *so, struct ktls_session *tls, int direction)
{
	struct crypto_session_params csp, mac_csp;
	struct ktls_ocf_session *os;
	int error, mac_len;

	memset(&csp, 0, sizeof(csp));
	memset(&mac_csp, 0, sizeof(mac_csp));
	mac_csp.csp_mode = CSP_MODE_NONE;
	mac_len = 0;

	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_NIST_GCM_16:
		switch (tls->params.cipher_key_len) {
		case 128 / 8:
		case 256 / 8:
			break;
		default:
			return (EINVAL);
		}

		/* Only TLS 1.2 and 1.3 are supported. */
		if (tls->params.tls_vmajor != TLS_MAJOR_VER_ONE ||
		    tls->params.tls_vminor < TLS_MINOR_VER_TWO ||
		    tls->params.tls_vminor > TLS_MINOR_VER_THREE)
			return (EPROTONOSUPPORT);

		csp.csp_flags |= CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD;
		csp.csp_mode = CSP_MODE_AEAD;
		csp.csp_cipher_alg = CRYPTO_AES_NIST_GCM_16;
		csp.csp_cipher_key = tls->params.cipher_key;
		csp.csp_cipher_klen = tls->params.cipher_key_len;
		csp.csp_ivlen = AES_GCM_IV_LEN;
		break;
	case CRYPTO_AES_CBC:
		switch (tls->params.cipher_key_len) {
		case 128 / 8:
		case 256 / 8:
			break;
		default:
			return (EINVAL);
		}

		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			mac_len = SHA1_HASH_LEN;
			break;
		case CRYPTO_SHA2_256_HMAC:
			mac_len = SHA2_256_HASH_LEN;
			break;
		case CRYPTO_SHA2_384_HMAC:
			mac_len = SHA2_384_HASH_LEN;
			break;
		default:
			return (EINVAL);
		}

		/* Only TLS 1.0-1.2 are supported. */
		if (tls->params.tls_vmajor != TLS_MAJOR_VER_ONE ||
		    tls->params.tls_vminor < TLS_MINOR_VER_ZERO ||
		    tls->params.tls_vminor > TLS_MINOR_VER_TWO)
			return (EPROTONOSUPPORT);

		/* AES-CBC is not supported for receive. */
		if (direction == KTLS_RX)
			return (EPROTONOSUPPORT);

		csp.csp_flags |= CSP_F_SEPARATE_OUTPUT;
		csp.csp_mode = CSP_MODE_CIPHER;
		csp.csp_cipher_alg = CRYPTO_AES_CBC;
		csp.csp_cipher_key = tls->params.cipher_key;
		csp.csp_cipher_klen = tls->params.cipher_key_len;
		csp.csp_ivlen = AES_BLOCK_LEN;

		mac_csp.csp_flags |= CSP_F_SEPARATE_OUTPUT;
		mac_csp.csp_mode = CSP_MODE_DIGEST;
		mac_csp.csp_auth_alg = tls->params.auth_algorithm;
		mac_csp.csp_auth_key = tls->params.auth_key;
		mac_csp.csp_auth_klen = tls->params.auth_key_len;
		break;
	case CRYPTO_CHACHA20_POLY1305:
		switch (tls->params.cipher_key_len) {
		case 256 / 8:
			break;
		default:
			return (EINVAL);
		}

		/* Only TLS 1.2 and 1.3 are supported. */
		if (tls->params.tls_vmajor != TLS_MAJOR_VER_ONE ||
		    tls->params.tls_vminor < TLS_MINOR_VER_TWO ||
		    tls->params.tls_vminor > TLS_MINOR_VER_THREE)
			return (EPROTONOSUPPORT);

		csp.csp_flags |= CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD;
		csp.csp_mode = CSP_MODE_AEAD;
		csp.csp_cipher_alg = CRYPTO_CHACHA20_POLY1305;
		csp.csp_cipher_key = tls->params.cipher_key;
		csp.csp_cipher_klen = tls->params.cipher_key_len;
		csp.csp_ivlen = CHACHA20_POLY1305_IV_LEN;
		break;
	default:
		return (EPROTONOSUPPORT);
	}

	os = malloc(sizeof(*os), M_KTLS_OCF, M_NOWAIT | M_ZERO);
	if (os == NULL)
		return (ENOMEM);

	error = crypto_newsession(&os->sid, &csp,
	    CRYPTO_FLAG_HARDWARE | CRYPTO_FLAG_SOFTWARE);
	if (error) {
		free(os, M_KTLS_OCF);
		return (error);
	}

	if (mac_csp.csp_mode != CSP_MODE_NONE) {
		error = crypto_newsession(&os->mac_sid, &mac_csp,
		    CRYPTO_FLAG_HARDWARE | CRYPTO_FLAG_SOFTWARE);
		if (error) {
			crypto_freesession(os->sid);
			free(os, M_KTLS_OCF);
			return (error);
		}
		os->mac_len = mac_len;
	}

	mtx_init(&os->lock, "ktls_ocf", NULL, MTX_DEF);
	tls->ocf_session = os;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16 ||
	    tls->params.cipher_algorithm == CRYPTO_CHACHA20_POLY1305) {
		if (direction == KTLS_TX) {
			if (tls->params.tls_vminor == TLS_MINOR_VER_THREE)
				tls->sw_encrypt = ktls_ocf_tls13_aead_encrypt;
			else
				tls->sw_encrypt = ktls_ocf_tls12_aead_encrypt;
		} else {
			if (tls->params.tls_vminor == TLS_MINOR_VER_THREE)
				tls->sw_decrypt = ktls_ocf_tls13_aead_decrypt;
			else
				tls->sw_decrypt = ktls_ocf_tls12_aead_decrypt;
		}
	} else {
		tls->sw_encrypt = ktls_ocf_tls_cbc_encrypt;
		if (tls->params.tls_vminor == TLS_MINOR_VER_ZERO) {
			os->implicit_iv = true;
			memcpy(os->iv, tls->params.iv, AES_BLOCK_LEN);
#ifdef INVARIANTS
			os->next_seqno = tls->next_seqno;
#endif
		}
	}

	/*
	 * AES-CBC is always synchronous currently.  Asynchronous
	 * operation would require multiple callbacks and an additional
	 * iovec array in ktls_ocf_encrypt_state.
	 */
	tls->sync_dispatch = CRYPTO_SESS_SYNC(os->sid) ||
	    tls->params.cipher_algorithm == CRYPTO_AES_CBC;
	return (0);
}