xref: /freebsd/sys/dev/cxgbe/crypto/t4_keyctx.c (revision 8ddb146abcdf061be9f2c0db7e391697dafad85c)
/*-
 * Copyright (c) 2017-2019 Chelsio Communications, Inc.
 * All rights reserved.
 * Written by: John Baldwin <jhb@FreeBSD.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include "opt_kern_tls.h"

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/types.h>
#include <sys/ktls.h>
#include <sys/malloc.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include "common/common.h"
#include "crypto/t4_crypto.h"

/*
 * Crypto operations use a key context to store cipher keys and
 * partial hash digests.  A key context can either be passed inline
 * as part of a crypto work request or stored in card RAM.  In the
 * latter case, work requests must replace the inline key context
 * with a request to read the context from card RAM.
 *
 * The format of a key context:
 *
 * +-------------------------------+
 * | key context header            |
 * +-------------------------------+
 * | AES key                       |  ----- For requests with AES
 * +-------------------------------+
 * | Hash state                    |  ----- For hash-only requests
 * +-------------------------------+ -
 * | IPAD (16-byte aligned)        |  \
 * +-------------------------------+  +---- For requests with HMAC
 * | OPAD (16-byte aligned)        |  /
 * +-------------------------------+ -
 * | GMAC H                        |  ----- For AES-GCM
 * +-------------------------------+ -
 */

/* Fields in the key context header. */
#define S_TLS_KEYCTX_TX_WR_DUALCK    12
#define M_TLS_KEYCTX_TX_WR_DUALCK    0x1
#define V_TLS_KEYCTX_TX_WR_DUALCK(x) ((x) << S_TLS_KEYCTX_TX_WR_DUALCK)
#define G_TLS_KEYCTX_TX_WR_DUALCK(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_DUALCK) & M_TLS_KEYCTX_TX_WR_DUALCK)
#define F_TLS_KEYCTX_TX_WR_DUALCK    V_TLS_KEYCTX_TX_WR_DUALCK(1U)

#define S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT 11
#define M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT)
#define G_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT) & \
     M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT)
#define F_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT \
    V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_SALT_PRESENT 10
#define M_TLS_KEYCTX_TX_WR_SALT_PRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_SALT_PRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_SALT_PRESENT)
#define G_TLS_KEYCTX_TX_WR_SALT_PRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_SALT_PRESENT) & \
     M_TLS_KEYCTX_TX_WR_SALT_PRESENT)
#define F_TLS_KEYCTX_TX_WR_SALT_PRESENT \
    V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_TXCK_SIZE 6
#define M_TLS_KEYCTX_TX_WR_TXCK_SIZE 0xf
#define V_TLS_KEYCTX_TX_WR_TXCK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXCK_SIZE)
#define G_TLS_KEYCTX_TX_WR_TXCK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXCK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_TXCK_SIZE)

#define S_TLS_KEYCTX_TX_WR_TXMK_SIZE 2
#define M_TLS_KEYCTX_TX_WR_TXMK_SIZE 0xf
#define V_TLS_KEYCTX_TX_WR_TXMK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXMK_SIZE)
#define G_TLS_KEYCTX_TX_WR_TXMK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXMK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_TXMK_SIZE)

#define S_TLS_KEYCTX_TX_WR_TXVALID   0
#define M_TLS_KEYCTX_TX_WR_TXVALID   0x1
#define V_TLS_KEYCTX_TX_WR_TXVALID(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXVALID)
#define G_TLS_KEYCTX_TX_WR_TXVALID(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXVALID) & M_TLS_KEYCTX_TX_WR_TXVALID)
#define F_TLS_KEYCTX_TX_WR_TXVALID   V_TLS_KEYCTX_TX_WR_TXVALID(1U)

#define S_TLS_KEYCTX_TX_WR_FLITCNT   3
#define M_TLS_KEYCTX_TX_WR_FLITCNT   0x1f
#define V_TLS_KEYCTX_TX_WR_FLITCNT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_FLITCNT)
#define G_TLS_KEYCTX_TX_WR_FLITCNT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_FLITCNT) & M_TLS_KEYCTX_TX_WR_FLITCNT)

#define S_TLS_KEYCTX_TX_WR_HMACCTRL  0
#define M_TLS_KEYCTX_TX_WR_HMACCTRL  0x7
#define V_TLS_KEYCTX_TX_WR_HMACCTRL(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_HMACCTRL)
#define G_TLS_KEYCTX_TX_WR_HMACCTRL(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_HMACCTRL) & M_TLS_KEYCTX_TX_WR_HMACCTRL)

#define S_TLS_KEYCTX_TX_WR_PROTOVER  4
#define M_TLS_KEYCTX_TX_WR_PROTOVER  0xf
#define V_TLS_KEYCTX_TX_WR_PROTOVER(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_PROTOVER)
#define G_TLS_KEYCTX_TX_WR_PROTOVER(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_PROTOVER) & M_TLS_KEYCTX_TX_WR_PROTOVER)

#define S_TLS_KEYCTX_TX_WR_CIPHMODE  0
#define M_TLS_KEYCTX_TX_WR_CIPHMODE  0xf
#define V_TLS_KEYCTX_TX_WR_CIPHMODE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHMODE)
#define G_TLS_KEYCTX_TX_WR_CIPHMODE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHMODE) & M_TLS_KEYCTX_TX_WR_CIPHMODE)

#define S_TLS_KEYCTX_TX_WR_AUTHMODE  4
#define M_TLS_KEYCTX_TX_WR_AUTHMODE  0xf
#define V_TLS_KEYCTX_TX_WR_AUTHMODE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHMODE)
#define G_TLS_KEYCTX_TX_WR_AUTHMODE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHMODE) & M_TLS_KEYCTX_TX_WR_AUTHMODE)

#define S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL 3
#define M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL 0x1
#define V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL)
#define G_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL) & \
     M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL)
#define F_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL \
    V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1U)

#define S_TLS_KEYCTX_TX_WR_SEQNUMCTRL 1
#define M_TLS_KEYCTX_TX_WR_SEQNUMCTRL 0x3
#define V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_SEQNUMCTRL)
#define G_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_SEQNUMCTRL) & \
     M_TLS_KEYCTX_TX_WR_SEQNUMCTRL)

#define S_TLS_KEYCTX_TX_WR_RXVALID   0
#define M_TLS_KEYCTX_TX_WR_RXVALID   0x1
#define V_TLS_KEYCTX_TX_WR_RXVALID(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXVALID)
#define G_TLS_KEYCTX_TX_WR_RXVALID(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXVALID) & M_TLS_KEYCTX_TX_WR_RXVALID)
#define F_TLS_KEYCTX_TX_WR_RXVALID   V_TLS_KEYCTX_TX_WR_RXVALID(1U)

#define S_TLS_KEYCTX_TX_WR_IVPRESENT 7
#define M_TLS_KEYCTX_TX_WR_IVPRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_IVPRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_IVPRESENT)
#define G_TLS_KEYCTX_TX_WR_IVPRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_IVPRESENT) & \
     M_TLS_KEYCTX_TX_WR_IVPRESENT)
#define F_TLS_KEYCTX_TX_WR_IVPRESENT V_TLS_KEYCTX_TX_WR_IVPRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT 6
#define M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT)
#define G_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT) & \
     M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT)
#define F_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT \
    V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_RXCK_SIZE 3
#define M_TLS_KEYCTX_TX_WR_RXCK_SIZE 0x7
#define V_TLS_KEYCTX_TX_WR_RXCK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXCK_SIZE)
#define G_TLS_KEYCTX_TX_WR_RXCK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXCK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_RXCK_SIZE)

#define S_TLS_KEYCTX_TX_WR_RXMK_SIZE 0
#define M_TLS_KEYCTX_TX_WR_RXMK_SIZE 0x7
#define V_TLS_KEYCTX_TX_WR_RXMK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXMK_SIZE)
#define G_TLS_KEYCTX_TX_WR_RXMK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXMK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_RXMK_SIZE)

#define S_TLS_KEYCTX_TX_WR_IVINSERT  55
#define M_TLS_KEYCTX_TX_WR_IVINSERT  0x1ffULL
#define V_TLS_KEYCTX_TX_WR_IVINSERT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_IVINSERT)
#define G_TLS_KEYCTX_TX_WR_IVINSERT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_IVINSERT) & M_TLS_KEYCTX_TX_WR_IVINSERT)

#define S_TLS_KEYCTX_TX_WR_AADSTRTOFST 47
#define M_TLS_KEYCTX_TX_WR_AADSTRTOFST 0xffULL
#define V_TLS_KEYCTX_TX_WR_AADSTRTOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AADSTRTOFST)
#define G_TLS_KEYCTX_TX_WR_AADSTRTOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AADSTRTOFST) & \
     M_TLS_KEYCTX_TX_WR_AADSTRTOFST)

#define S_TLS_KEYCTX_TX_WR_AADSTOPOFST 39
#define M_TLS_KEYCTX_TX_WR_AADSTOPOFST 0xffULL
#define V_TLS_KEYCTX_TX_WR_AADSTOPOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AADSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_AADSTOPOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AADSTOPOFST) & \
     M_TLS_KEYCTX_TX_WR_AADSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST 30
#define M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST 0x1ffULL
#define V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST)
#define G_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST) & \
     M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST)

#define S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST 23
#define M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST 0x7f
#define V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST) & \
     M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHSRTOFST 14
#define M_TLS_KEYCTX_TX_WR_AUTHSRTOFST 0x1ff
#define V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHSRTOFST)
#define G_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHSRTOFST) & \
     M_TLS_KEYCTX_TX_WR_AUTHSRTOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST 7
#define M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST 0x7f
#define V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST) & \
     M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHINSRT 0
#define M_TLS_KEYCTX_TX_WR_AUTHINSRT 0x7f
#define V_TLS_KEYCTX_TX_WR_AUTHINSRT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHINSRT)
#define G_TLS_KEYCTX_TX_WR_AUTHINSRT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHINSRT) & \
     M_TLS_KEYCTX_TX_WR_AUTHINSRT)

/* Key Context Programming Operation type */
#define KEY_WRITE_RX			0x1
#define KEY_WRITE_TX			0x2
#define KEY_DELETE_RX			0x4
#define KEY_DELETE_TX			0x8

#define S_KEY_CLR_LOC		4
#define M_KEY_CLR_LOC		0xf
#define V_KEY_CLR_LOC(x)	((x) << S_KEY_CLR_LOC)
#define G_KEY_CLR_LOC(x)	(((x) >> S_KEY_CLR_LOC) & M_KEY_CLR_LOC)
#define F_KEY_CLR_LOC		V_KEY_CLR_LOC(1U)

#define S_KEY_GET_LOC           0
#define M_KEY_GET_LOC           0xf
#define V_KEY_GET_LOC(x)        ((x) << S_KEY_GET_LOC)
#define G_KEY_GET_LOC(x)        (((x) >> S_KEY_GET_LOC) & M_KEY_GET_LOC)

/*
 * Generate the initial GMAC hash state for an AES-GCM key.
 *
 * Borrowed from AES_GMAC_Setkey().
 */
void
t4_init_gmac_hash(const char *key, int klen, char *ghash)
{
	static char zeroes[GMAC_BLOCK_LEN];
	uint32_t keysched[4 * (RIJNDAEL_MAXNR + 1)];
	int rounds;

	rounds = rijndaelKeySetupEnc(keysched, key, klen * 8);
	rijndaelEncrypt(keysched, rounds, zeroes, ghash);
	explicit_bzero(keysched, sizeof(keysched));
}

/* Copy out the partial hash state from a software hash implementation. */
void
t4_copy_partial_hash(int alg, union authctx *auth_ctx, void *dst)
{
	uint32_t *u32;
	uint64_t *u64;
	u_int i;

	u32 = (uint32_t *)dst;
	u64 = (uint64_t *)dst;
	switch (alg) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA1_HMAC:
		for (i = 0; i < SHA1_HASH_LEN / 4; i++)
			u32[i] = htobe32(auth_ctx->sha1ctx.h.b32[i]);
		break;
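	/* SHA-224 uses the SHA-256 state layout; copy the full state. */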
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_224_HMAC:
		for (i = 0; i < SHA2_256_HASH_LEN / 4; i++)
			u32[i] = htobe32(auth_ctx->sha224ctx.state[i]);
		break;
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_256_HMAC:
		for (i = 0; i < SHA2_256_HASH_LEN / 4; i++)
			u32[i] = htobe32(auth_ctx->sha256ctx.state[i]);
		break;
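	/* SHA-384 uses the SHA-512 state layout; copy the full state. */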
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_384_HMAC:
		for (i = 0; i < SHA2_512_HASH_LEN / 8; i++)
			u64[i] = htobe64(auth_ctx->sha384ctx.state[i]);
		break;
	case CRYPTO_SHA2_512:
	case CRYPTO_SHA2_512_HMAC:
		for (i = 0; i < SHA2_512_HASH_LEN / 8; i++)
			u64[i] = htobe64(auth_ctx->sha512ctx.state[i]);
		break;
	}
}

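/*
 * Compute the partial IPAD and OPAD hash states for an HMAC key and
 * store them at 'dst', each padded out to a 16-byte boundary.
 */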
void
t4_init_hmac_digest(const struct auth_hash *axf, u_int partial_digest_len,
    const char *key, int klen, char *dst)
{
	union authctx auth_ctx;

	hmac_init_ipad(axf, key, klen, &auth_ctx);
	t4_copy_partial_hash(axf->type, &auth_ctx, dst);

	dst += roundup2(partial_digest_len, 16);

	hmac_init_opad(axf, key, klen, &auth_ctx);
	t4_copy_partial_hash(axf->type, &auth_ctx, dst);

	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
}

/*
 * Borrowed from cesa_prep_aes_key().
 *
 * NB: The crypto engine wants the words in the decryption key in reverse
 * order.
 */
void
t4_aes_getdeckey(void *dec_key, const void *enc_key, unsigned int kbits)
{
	uint32_t ek[4 * (RIJNDAEL_MAXNR + 1)];
	uint32_t *dkey;
	int i;

	rijndaelKeySetupEnc(ek, enc_key, kbits);
	dkey = dec_key;
	dkey += (kbits / 8) / 4;

	switch (kbits) {
	case 128:
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 10 + i]);
		break;
	case 192:
		for (i = 0; i < 2; i++)
			*--dkey = htobe32(ek[4 * 11 + 2 + i]);
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 12 + i]);
		break;
	case 256:
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 13 + i]);
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 14 + i]);
		break;
	}
	MPASS(dkey == dec_key);
	explicit_bzero(ek, sizeof(ek));
}

#ifdef KERN_TLS
/*
 * KERN_TLS support: sizing and construction of TLS key contexts, key
 * ID (keyid) management in card key memory, and work requests to
 * program keys into that memory.
 */

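/* Size in bytes of the populated key context for a KTLS session. */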
u_int
t4_tls_key_info_size(const struct ktls_session *tls)
{
	u_int key_info_size, mac_key_size;

	key_info_size = sizeof(struct tx_keyctx_hdr) +
	    tls->params.cipher_key_len;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		key_info_size += GMAC_BLOCK_LEN;
	} else {
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			mac_key_size = SHA1_HASH_LEN;
			break;
		case CRYPTO_SHA2_256_HMAC:
			mac_key_size = SHA2_256_HASH_LEN;
			break;
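		/* SHA-384 partial hashes use the SHA-512 state size. */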
		case CRYPTO_SHA2_384_HMAC:
			mac_key_size = SHA2_512_HASH_LEN;
			break;
		default:
			__assert_unreachable();
		}
		key_info_size += roundup2(mac_key_size, 16) * 2;
	}
	return (key_info_size);
}

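/* Map the session's TLS protocol version to the SCMD protocol version code. */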
int
t4_tls_proto_ver(const struct ktls_session *tls)
{
	if (tls->params.tls_vminor == TLS_MINOR_VER_ONE)
		return (SCMD_PROTO_VERSION_TLS_1_1);
	else
		return (SCMD_PROTO_VERSION_TLS_1_2);
}

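/* Map the session's cipher algorithm to an SCMD cipher mode. */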
int
t4_tls_cipher_mode(const struct ktls_session *tls)
{
	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_CBC:
		return (SCMD_CIPH_MODE_AES_CBC);
	case CRYPTO_AES_NIST_GCM_16:
		return (SCMD_CIPH_MODE_AES_GCM);
	default:
		return (SCMD_CIPH_MODE_NOP);
	}
}

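/* Map the session's authentication algorithm to an SCMD auth mode. */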
int
t4_tls_auth_mode(const struct ktls_session *tls)
{
	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_CBC:
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			return (SCMD_AUTH_MODE_SHA1);
		case CRYPTO_SHA2_256_HMAC:
			return (SCMD_AUTH_MODE_SHA256);
		case CRYPTO_SHA2_384_HMAC:
			return (SCMD_AUTH_MODE_SHA512_384);
		default:
			return (SCMD_AUTH_MODE_NOP);
		}
	case CRYPTO_AES_NIST_GCM_16:
		return (SCMD_AUTH_MODE_GHASH);
	default:
		return (SCMD_AUTH_MODE_NOP);
	}
}

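/*
 * Select the HMAC control mode: untruncated digests for AES-CBC
 * MAC-then-encrypt suites, none for AES-GCM.
 */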
int
t4_tls_hmac_ctrl(const struct ktls_session *tls)
{
	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_CBC:
		return (SCMD_HMAC_CTRL_NO_TRUNC);
	case CRYPTO_AES_NIST_GCM_16:
		return (SCMD_HMAC_CTRL_NOP);
	default:
		return (SCMD_HMAC_CTRL_NOP);
	}
}

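/* Encode the cipher key length as a CHCR_KEYCTX_CIPHER_KEY_SIZE_* value. */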
static int
tls_cipher_key_size(const struct ktls_session *tls)
{
	switch (tls->params.cipher_key_len) {
	case 128 / 8:
		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_128);
	case 192 / 8:
		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_192);
	case 256 / 8:
		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_256);
	default:
		__assert_unreachable();
	}
}

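/*
 * Encode the MAC key (partial hash) size as a CHCR_KEYCTX_MAC_KEY_SIZE_*
 * value.  AES-GCM and SHA-384 both use the 512-bit encoding.
 */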
static int
tls_mac_key_size(const struct ktls_session *tls)
{
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		return (CHCR_KEYCTX_MAC_KEY_SIZE_512);
	else {
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			return (CHCR_KEYCTX_MAC_KEY_SIZE_160);
		case CRYPTO_SHA2_256_HMAC:
			return (CHCR_KEYCTX_MAC_KEY_SIZE_256);
		case CRYPTO_SHA2_384_HMAC:
			return (CHCR_KEYCTX_MAC_KEY_SIZE_512);
		default:
			__assert_unreachable();
		}
	}
}

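/*
 * Populate the key context for a KTLS session.  The header layout
 * differs for transmit and receive keys.  The cipher key is copied
 * as-is except for AES-CBC receive, which stores the expanded
 * decryption key.  The GMAC H value or HMAC IPAD/OPAD partial hashes
 * follow the key; for AES-GCM the implicit IV (salt) is stored in the
 * header.
 */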
void
t4_tls_key_ctx(const struct ktls_session *tls, int direction,
    struct tls_keyctx *kctx)
{
	const struct auth_hash *axf;
	u_int mac_key_size;
	char *hash;

	/* Key context header. */
	if (direction == KTLS_TX) {
		kctx->u.txhdr.ctxlen = t4_tls_key_info_size(tls) / 16;
		kctx->u.txhdr.dualck_to_txvalid =
		    V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1) |
		    V_TLS_KEYCTX_TX_WR_TXCK_SIZE(tls_cipher_key_size(tls)) |
		    V_TLS_KEYCTX_TX_WR_TXMK_SIZE(tls_mac_key_size(tls)) |
		    V_TLS_KEYCTX_TX_WR_TXVALID(1);
		if (tls->params.cipher_algorithm == CRYPTO_AES_CBC)
			kctx->u.txhdr.dualck_to_txvalid |=
			    V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1);
		kctx->u.txhdr.dualck_to_txvalid =
		    htobe16(kctx->u.txhdr.dualck_to_txvalid);
	} else {
		kctx->u.rxhdr.flitcnt_hmacctrl =
		    V_TLS_KEYCTX_TX_WR_FLITCNT(t4_tls_key_info_size(tls) / 16) |
		    V_TLS_KEYCTX_TX_WR_HMACCTRL(t4_tls_hmac_ctrl(tls));

		kctx->u.rxhdr.protover_ciphmode =
		    V_TLS_KEYCTX_TX_WR_PROTOVER(t4_tls_proto_ver(tls)) |
		    V_TLS_KEYCTX_TX_WR_CIPHMODE(t4_tls_cipher_mode(tls));

		kctx->u.rxhdr.authmode_to_rxvalid =
		    V_TLS_KEYCTX_TX_WR_AUTHMODE(t4_tls_auth_mode(tls)) |
		    V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(3) |
		    V_TLS_KEYCTX_TX_WR_RXVALID(1);

		kctx->u.rxhdr.ivpresent_to_rxmk_size =
		    V_TLS_KEYCTX_TX_WR_IVPRESENT(0) |
		    V_TLS_KEYCTX_TX_WR_RXCK_SIZE(tls_cipher_key_size(tls)) |
		    V_TLS_KEYCTX_TX_WR_RXMK_SIZE(tls_mac_key_size(tls));

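		/* Per-record IV, AAD, cipher, and auth offsets. */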
		if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
			kctx->u.rxhdr.ivinsert_to_authinsrt =
			    htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(14ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(16ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(14ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHINSRT(16ULL));
		} else {
			kctx->u.rxhdr.authmode_to_rxvalid |=
			    V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1);
			kctx->u.rxhdr.ivpresent_to_rxmk_size |=
			    V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1);
			kctx->u.rxhdr.ivinsert_to_authinsrt =
			    htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(22ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(0ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(22ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHINSRT(0ULL));
		}
	}

	/* Key. */
	if (direction == KTLS_RX &&
	    tls->params.cipher_algorithm == CRYPTO_AES_CBC)
		t4_aes_getdeckey(kctx->keys.edkey, tls->params.cipher_key,
		    tls->params.cipher_key_len * 8);
	else
		memcpy(kctx->keys.edkey, tls->params.cipher_key,
		    tls->params.cipher_key_len);

	/* Auth state and implicit IV (salt). */
	hash = kctx->keys.edkey + tls->params.cipher_key_len;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		_Static_assert(offsetof(struct tx_keyctx_hdr, txsalt) ==
		    offsetof(struct rx_keyctx_hdr, rxsalt),
		    "salt offset mismatch");
		memcpy(kctx->u.txhdr.txsalt, tls->params.iv, SALT_SIZE);
		t4_init_gmac_hash(tls->params.cipher_key,
		    tls->params.cipher_key_len, hash);
	} else {
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			axf = &auth_hash_hmac_sha1;
			mac_key_size = SHA1_HASH_LEN;
			break;
		case CRYPTO_SHA2_256_HMAC:
			axf = &auth_hash_hmac_sha2_256;
			mac_key_size = SHA2_256_HASH_LEN;
			break;
		case CRYPTO_SHA2_384_HMAC:
			axf = &auth_hash_hmac_sha2_384;
			mac_key_size = SHA2_512_HASH_LEN;
			break;
		default:
			__assert_unreachable();
		}
		t4_init_hmac_digest(axf, mac_key_size, tls->params.auth_key,
		    tls->params.auth_key_len, hash);
	}
}

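/*
 * Allocate an address (keyid) for a TLS key context in card key memory.
 * Returns -1 if no key memory is configured or the allocation fails.
 */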
int
t4_alloc_tls_keyid(struct adapter *sc)
{
	vmem_addr_t addr;

	if (sc->vres.key.size == 0)
		return (-1);

	if (vmem_alloc(sc->key_map, TLS_KEY_CONTEXT_SZ, M_NOWAIT | M_FIRSTFIT,
	    &addr) != 0)
		return (-1);

	return (addr);
}

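/* Release a key context address allocated by t4_alloc_tls_keyid(). */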
void
t4_free_tls_keyid(struct adapter *sc, int keyid)
{
	vmem_free(sc->key_map, keyid, TLS_KEY_CONTEXT_SZ);
}

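/*
 * Build the ULP_TX work request that writes a TLS key context into
 * card key memory at 'keyid' for the connection identified by 'tid'.
 */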
void
t4_write_tlskey_wr(const struct ktls_session *tls, int direction, int tid,
    int flags, int keyid, struct tls_key_req *kwr)
{
	kwr->wr_hi = htobe32(V_FW_WR_OP(FW_ULPTX_WR) | F_FW_WR_ATOMIC | flags);
	kwr->wr_mid = htobe32(V_FW_WR_LEN16(DIV_ROUND_UP(TLS_KEY_WR_SZ, 16)) |
	    V_FW_WR_FLOWID(tid));
	kwr->protocol = t4_tls_proto_ver(tls);
	kwr->mfs = htobe16(tls->params.max_frame_len);
	kwr->reneg_to_write_rx = V_KEY_GET_LOC(direction == KTLS_TX ?
	    KEY_WRITE_TX : KEY_WRITE_RX);

	/* master command */
	kwr->cmd = htobe32(V_ULPTX_CMD(ULP_TX_MEM_WRITE) |
	    V_T5_ULP_MEMIO_ORDER(1) | V_T5_ULP_MEMIO_IMM(1));
	kwr->dlen = htobe32(V_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5));
	kwr->len16 = htobe32((tid << 8) |
	    DIV_ROUND_UP(TLS_KEY_WR_SZ - sizeof(struct work_request_hdr), 16));
	kwr->kaddr = htobe32(V_ULP_MEMIO_ADDR(keyid >> 5));

	/* sub command */
	kwr->sc_more = htobe32(V_ULPTX_CMD(ULP_TX_SC_IMM));
	kwr->sc_len = htobe32(TLS_KEY_CONTEXT_SZ);
}
#endif