xref: /freebsd/sys/dev/cxgbe/crypto/t4_keyctx.c (revision d0b2dbfa0ecf2bbc9709efc5e20baf8e4b44bbbf)
1 /*-
2  * Copyright (c) 2017-2019 Chelsio Communications, Inc.
3  * All rights reserved.
4  * Written by: John Baldwin <jhb@FreeBSD.org>
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  * 1. Redistributions of source code must retain the above copyright
10  *    notice, this list of conditions and the following disclaimer.
11  * 2. Redistributions in binary form must reproduce the above copyright
12  *    notice, this list of conditions and the following disclaimer in the
13  *    documentation and/or other materials provided with the distribution.
14  *
15  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
16  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
19  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
20  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
21  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
22  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
23  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
24  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
25  * SUCH DAMAGE.
26  */
27 
28 #include "opt_kern_tls.h"
29 
30 #include <sys/cdefs.h>
31 #include <sys/types.h>
32 #include <sys/ktls.h>
33 #include <sys/malloc.h>
34 
35 #include <opencrypto/cryptodev.h>
36 #include <opencrypto/xform.h>
37 
38 #include "common/common.h"
39 #include "crypto/t4_crypto.h"
40 
41 /*
42  * Crypto operations use a key context to store cipher keys and
43  * partial hash digests.  They can either be passed inline as part of
44  * a work request using crypto or they can be stored in card RAM.  For
45  * the latter case, work requests must replace the inline key context
46  * with a request to read the context from card RAM.
47  *
48  * The format of a key context:
49  *
50  * +-------------------------------+
51  * | key context header            |
52  * +-------------------------------+
53  * | AES key                       |  ----- For requests with AES
54  * +-------------------------------+
55  * | Hash state                    |  ----- For hash-only requests
56  * +-------------------------------+ -
57  * | IPAD (16-byte aligned)        |  \
58  * +-------------------------------+  +---- For requests with HMAC
59  * | OPAD (16-byte aligned)        |  /
60  * +-------------------------------+ -
61  * | GMAC H                        |  ----- For AES-GCM
62  * +-------------------------------+ -
63  */
64 
/*
 * Fields in the key context header.  These follow the hardware's
 * S_/M_/V_/G_/F_ convention: shift, mask, insert, extract, and flag.
 */
#define S_TLS_KEYCTX_TX_WR_DUALCK    12
#define M_TLS_KEYCTX_TX_WR_DUALCK    0x1
#define V_TLS_KEYCTX_TX_WR_DUALCK(x) ((x) << S_TLS_KEYCTX_TX_WR_DUALCK)
#define G_TLS_KEYCTX_TX_WR_DUALCK(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_DUALCK) & M_TLS_KEYCTX_TX_WR_DUALCK)
#define F_TLS_KEYCTX_TX_WR_DUALCK    V_TLS_KEYCTX_TX_WR_DUALCK(1U)

#define S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT 11
#define M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT)
#define G_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT) & \
     M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT)
#define F_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT \
    V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_SALT_PRESENT 10
#define M_TLS_KEYCTX_TX_WR_SALT_PRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_SALT_PRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_SALT_PRESENT)
#define G_TLS_KEYCTX_TX_WR_SALT_PRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_SALT_PRESENT) & \
     M_TLS_KEYCTX_TX_WR_SALT_PRESENT)
#define F_TLS_KEYCTX_TX_WR_SALT_PRESENT \
    V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_TXCK_SIZE 6
#define M_TLS_KEYCTX_TX_WR_TXCK_SIZE 0xf
#define V_TLS_KEYCTX_TX_WR_TXCK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXCK_SIZE)
#define G_TLS_KEYCTX_TX_WR_TXCK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXCK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_TXCK_SIZE)

#define S_TLS_KEYCTX_TX_WR_TXMK_SIZE 2
#define M_TLS_KEYCTX_TX_WR_TXMK_SIZE 0xf
#define V_TLS_KEYCTX_TX_WR_TXMK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXMK_SIZE)
#define G_TLS_KEYCTX_TX_WR_TXMK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXMK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_TXMK_SIZE)

#define S_TLS_KEYCTX_TX_WR_TXVALID   0
#define M_TLS_KEYCTX_TX_WR_TXVALID   0x1
#define V_TLS_KEYCTX_TX_WR_TXVALID(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_TXVALID)
#define G_TLS_KEYCTX_TX_WR_TXVALID(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_TXVALID) & M_TLS_KEYCTX_TX_WR_TXVALID)
#define F_TLS_KEYCTX_TX_WR_TXVALID   V_TLS_KEYCTX_TX_WR_TXVALID(1U)

#define S_TLS_KEYCTX_TX_WR_FLITCNT   3
#define M_TLS_KEYCTX_TX_WR_FLITCNT   0x1f
#define V_TLS_KEYCTX_TX_WR_FLITCNT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_FLITCNT)
#define G_TLS_KEYCTX_TX_WR_FLITCNT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_FLITCNT) & M_TLS_KEYCTX_TX_WR_FLITCNT)

#define S_TLS_KEYCTX_TX_WR_HMACCTRL  0
#define M_TLS_KEYCTX_TX_WR_HMACCTRL  0x7
#define V_TLS_KEYCTX_TX_WR_HMACCTRL(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_HMACCTRL)
#define G_TLS_KEYCTX_TX_WR_HMACCTRL(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_HMACCTRL) & M_TLS_KEYCTX_TX_WR_HMACCTRL)

#define S_TLS_KEYCTX_TX_WR_PROTOVER  4
#define M_TLS_KEYCTX_TX_WR_PROTOVER  0xf
#define V_TLS_KEYCTX_TX_WR_PROTOVER(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_PROTOVER)
#define G_TLS_KEYCTX_TX_WR_PROTOVER(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_PROTOVER) & M_TLS_KEYCTX_TX_WR_PROTOVER)

#define S_TLS_KEYCTX_TX_WR_CIPHMODE  0
#define M_TLS_KEYCTX_TX_WR_CIPHMODE  0xf
#define V_TLS_KEYCTX_TX_WR_CIPHMODE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHMODE)
#define G_TLS_KEYCTX_TX_WR_CIPHMODE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHMODE) & M_TLS_KEYCTX_TX_WR_CIPHMODE)

#define S_TLS_KEYCTX_TX_WR_AUTHMODE  4
#define M_TLS_KEYCTX_TX_WR_AUTHMODE  0xf
#define V_TLS_KEYCTX_TX_WR_AUTHMODE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHMODE)
#define G_TLS_KEYCTX_TX_WR_AUTHMODE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHMODE) & M_TLS_KEYCTX_TX_WR_AUTHMODE)

#define S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL 3
#define M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL 0x1
#define V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL)
#define G_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL) & \
     M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL)
#define F_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL \
    V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1U)

#define S_TLS_KEYCTX_TX_WR_SEQNUMCTRL 1
#define M_TLS_KEYCTX_TX_WR_SEQNUMCTRL 0x3
#define V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_SEQNUMCTRL)
#define G_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_SEQNUMCTRL) & \
     M_TLS_KEYCTX_TX_WR_SEQNUMCTRL)

#define S_TLS_KEYCTX_TX_WR_RXVALID   0
#define M_TLS_KEYCTX_TX_WR_RXVALID   0x1
#define V_TLS_KEYCTX_TX_WR_RXVALID(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXVALID)
#define G_TLS_KEYCTX_TX_WR_RXVALID(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXVALID) & M_TLS_KEYCTX_TX_WR_RXVALID)
#define F_TLS_KEYCTX_TX_WR_RXVALID   V_TLS_KEYCTX_TX_WR_RXVALID(1U)

#define S_TLS_KEYCTX_TX_WR_IVPRESENT 7
#define M_TLS_KEYCTX_TX_WR_IVPRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_IVPRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_IVPRESENT)
#define G_TLS_KEYCTX_TX_WR_IVPRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_IVPRESENT) & \
     M_TLS_KEYCTX_TX_WR_IVPRESENT)
#define F_TLS_KEYCTX_TX_WR_IVPRESENT V_TLS_KEYCTX_TX_WR_IVPRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT 6
#define M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT 0x1
#define V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT)
#define G_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT) & \
     M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT)
#define F_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT \
    V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_RXCK_SIZE 3
#define M_TLS_KEYCTX_TX_WR_RXCK_SIZE 0x7
#define V_TLS_KEYCTX_TX_WR_RXCK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXCK_SIZE)
#define G_TLS_KEYCTX_TX_WR_RXCK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXCK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_RXCK_SIZE)

#define S_TLS_KEYCTX_TX_WR_RXMK_SIZE 0
#define M_TLS_KEYCTX_TX_WR_RXMK_SIZE 0x7
#define V_TLS_KEYCTX_TX_WR_RXMK_SIZE(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_RXMK_SIZE)
#define G_TLS_KEYCTX_TX_WR_RXMK_SIZE(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_RXMK_SIZE) & \
     M_TLS_KEYCTX_TX_WR_RXMK_SIZE)

/*
 * Fields of the 64-bit ivinsert_to_authinsrt word.  These shifts
 * exceed 32 bits, so callers must pass 64-bit (ULL) arguments.
 */
#define S_TLS_KEYCTX_TX_WR_IVINSERT  55
#define M_TLS_KEYCTX_TX_WR_IVINSERT  0x1ffULL
#define V_TLS_KEYCTX_TX_WR_IVINSERT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_IVINSERT)
#define G_TLS_KEYCTX_TX_WR_IVINSERT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_IVINSERT) & M_TLS_KEYCTX_TX_WR_IVINSERT)

#define S_TLS_KEYCTX_TX_WR_AADSTRTOFST 47
#define M_TLS_KEYCTX_TX_WR_AADSTRTOFST 0xffULL
#define V_TLS_KEYCTX_TX_WR_AADSTRTOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AADSTRTOFST)
#define G_TLS_KEYCTX_TX_WR_AADSTRTOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AADSTRTOFST) & \
     M_TLS_KEYCTX_TX_WR_AADSTRTOFST)

#define S_TLS_KEYCTX_TX_WR_AADSTOPOFST 39
#define M_TLS_KEYCTX_TX_WR_AADSTOPOFST 0xffULL
#define V_TLS_KEYCTX_TX_WR_AADSTOPOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AADSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_AADSTOPOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AADSTOPOFST) & \
     M_TLS_KEYCTX_TX_WR_AADSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST 30
#define M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST 0x1ffULL
#define V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST)
#define G_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST) & \
     M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST)

#define S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST 23
#define M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST 0x7f
#define V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST) & \
     M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHSRTOFST 14
#define M_TLS_KEYCTX_TX_WR_AUTHSRTOFST 0x1ff
#define V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHSRTOFST)
#define G_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHSRTOFST) & \
     M_TLS_KEYCTX_TX_WR_AUTHSRTOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST 7
#define M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST 0x7f
#define V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST) & \
     M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHINSRT 0
#define M_TLS_KEYCTX_TX_WR_AUTHINSRT 0x7f
#define V_TLS_KEYCTX_TX_WR_AUTHINSRT(x) \
    ((x) << S_TLS_KEYCTX_TX_WR_AUTHINSRT)
#define G_TLS_KEYCTX_TX_WR_AUTHINSRT(x) \
    (((x) >> S_TLS_KEYCTX_TX_WR_AUTHINSRT) & \
     M_TLS_KEYCTX_TX_WR_AUTHINSRT)

/* Key Context Programming Operation type */
#define KEY_WRITE_RX			0x1
#define KEY_WRITE_TX			0x2
#define KEY_DELETE_RX			0x4
#define KEY_DELETE_TX			0x8

#define S_KEY_CLR_LOC		4
#define M_KEY_CLR_LOC		0xf
#define V_KEY_CLR_LOC(x)	((x) << S_KEY_CLR_LOC)
#define G_KEY_CLR_LOC(x)	(((x) >> S_KEY_CLR_LOC) & M_KEY_CLR_LOC)
#define F_KEY_CLR_LOC		V_KEY_CLR_LOC(1U)

#define S_KEY_GET_LOC           0
#define M_KEY_GET_LOC           0xf
#define V_KEY_GET_LOC(x)        ((x) << S_KEY_GET_LOC)
#define G_KEY_GET_LOC(x)        (((x) >> S_KEY_GET_LOC) & M_KEY_GET_LOC)
293 /*
294  * Generate the initial GMAC hash state for a AES-GCM key.
295  *
296  * Borrowed from AES_GMAC_Setkey().
297  */
298 void
299 t4_init_gmac_hash(const char *key, int klen, char *ghash)
300 {
301 	static char zeroes[GMAC_BLOCK_LEN];
302 	uint32_t keysched[4 * (RIJNDAEL_MAXNR + 1)];
303 	int rounds;
304 
305 	rounds = rijndaelKeySetupEnc(keysched, key, klen * 8);
306 	rijndaelEncrypt(keysched, rounds, zeroes, ghash);
307 	explicit_bzero(keysched, sizeof(keysched));
308 }
309 
310 /* Copy out the partial hash state from a software hash implementation. */
311 void
312 t4_copy_partial_hash(int alg, union authctx *auth_ctx, void *dst)
313 {
314 	uint32_t *u32;
315 	uint64_t *u64;
316 	u_int i;
317 
318 	u32 = (uint32_t *)dst;
319 	u64 = (uint64_t *)dst;
320 	switch (alg) {
321 	case CRYPTO_SHA1:
322 	case CRYPTO_SHA1_HMAC:
323 		for (i = 0; i < SHA1_HASH_LEN / 4; i++)
324 			u32[i] = htobe32(auth_ctx->sha1ctx.h.b32[i]);
325 		break;
326 	case CRYPTO_SHA2_224:
327 	case CRYPTO_SHA2_224_HMAC:
328 		for (i = 0; i < SHA2_256_HASH_LEN / 4; i++)
329 			u32[i] = htobe32(auth_ctx->sha224ctx.state[i]);
330 		break;
331 	case CRYPTO_SHA2_256:
332 	case CRYPTO_SHA2_256_HMAC:
333 		for (i = 0; i < SHA2_256_HASH_LEN / 4; i++)
334 			u32[i] = htobe32(auth_ctx->sha256ctx.state[i]);
335 		break;
336 	case CRYPTO_SHA2_384:
337 	case CRYPTO_SHA2_384_HMAC:
338 		for (i = 0; i < SHA2_512_HASH_LEN / 8; i++)
339 			u64[i] = htobe64(auth_ctx->sha384ctx.state[i]);
340 		break;
341 	case CRYPTO_SHA2_512:
342 	case CRYPTO_SHA2_512_HMAC:
343 		for (i = 0; i < SHA2_512_HASH_LEN / 8; i++)
344 			u64[i] = htobe64(auth_ctx->sha512ctx.state[i]);
345 		break;
346 	}
347 }
348 
349 void
350 t4_init_hmac_digest(const struct auth_hash *axf, u_int partial_digest_len,
351     const char *key, int klen, char *dst)
352 {
353 	union authctx auth_ctx;
354 
355 	hmac_init_ipad(axf, key, klen, &auth_ctx);
356 	t4_copy_partial_hash(axf->type, &auth_ctx, dst);
357 
358 	dst += roundup2(partial_digest_len, 16);
359 
360 	hmac_init_opad(axf, key, klen, &auth_ctx);
361 	t4_copy_partial_hash(axf->type, &auth_ctx, dst);
362 
363 	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
364 }
365 
366 /*
367  * Borrowed from cesa_prep_aes_key().
368  *
369  * NB: The crypto engine wants the words in the decryption key in reverse
370  * order.
371  */
372 void
373 t4_aes_getdeckey(void *dec_key, const void *enc_key, unsigned int kbits)
374 {
375 	uint32_t ek[4 * (RIJNDAEL_MAXNR + 1)];
376 	uint32_t *dkey;
377 	int i;
378 
379 	rijndaelKeySetupEnc(ek, enc_key, kbits);
380 	dkey = dec_key;
381 	dkey += (kbits / 8) / 4;
382 
383 	switch (kbits) {
384 	case 128:
385 		for (i = 0; i < 4; i++)
386 			*--dkey = htobe32(ek[4 * 10 + i]);
387 		break;
388 	case 192:
389 		for (i = 0; i < 2; i++)
390 			*--dkey = htobe32(ek[4 * 11 + 2 + i]);
391 		for (i = 0; i < 4; i++)
392 			*--dkey = htobe32(ek[4 * 12 + i]);
393 		break;
394 	case 256:
395 		for (i = 0; i < 4; i++)
396 			*--dkey = htobe32(ek[4 * 13 + i]);
397 		for (i = 0; i < 4; i++)
398 			*--dkey = htobe32(ek[4 * 14 + i]);
399 		break;
400 	}
401 	MPASS(dkey == dec_key);
402 	explicit_bzero(ek, sizeof(ek));
403 }
404 
405 #ifdef KERN_TLS
406 /*
407  * - keyid management
408  * - request to program key?
409  */
410 u_int
411 t4_tls_key_info_size(const struct ktls_session *tls)
412 {
413 	u_int key_info_size, mac_key_size;
414 
415 	key_info_size = sizeof(struct tx_keyctx_hdr) +
416 	    tls->params.cipher_key_len;
417 	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
418 		key_info_size += GMAC_BLOCK_LEN;
419 	} else {
420 		switch (tls->params.auth_algorithm) {
421 		case CRYPTO_SHA1_HMAC:
422 			mac_key_size = SHA1_HASH_LEN;
423 			break;
424 		case CRYPTO_SHA2_256_HMAC:
425 			mac_key_size = SHA2_256_HASH_LEN;
426 			break;
427 		case CRYPTO_SHA2_384_HMAC:
428 			mac_key_size = SHA2_512_HASH_LEN;
429 			break;
430 		default:
431 			__assert_unreachable();
432 		}
433 		key_info_size += roundup2(mac_key_size, 16) * 2;
434 	}
435 	return (key_info_size);
436 }
437 
438 int
439 t4_tls_proto_ver(const struct ktls_session *tls)
440 {
441 	if (tls->params.tls_vminor == TLS_MINOR_VER_ONE)
442 		return (SCMD_PROTO_VERSION_TLS_1_1);
443 	else
444 		return (SCMD_PROTO_VERSION_TLS_1_2);
445 }
446 
447 int
448 t4_tls_cipher_mode(const struct ktls_session *tls)
449 {
450 	switch (tls->params.cipher_algorithm) {
451 	case CRYPTO_AES_CBC:
452 		return (SCMD_CIPH_MODE_AES_CBC);
453 	case CRYPTO_AES_NIST_GCM_16:
454 		return (SCMD_CIPH_MODE_AES_GCM);
455 	default:
456 		return (SCMD_CIPH_MODE_NOP);
457 	}
458 }
459 
460 int
461 t4_tls_auth_mode(const struct ktls_session *tls)
462 {
463 	switch (tls->params.cipher_algorithm) {
464 	case CRYPTO_AES_CBC:
465 		switch (tls->params.auth_algorithm) {
466 		case CRYPTO_SHA1_HMAC:
467 			return (SCMD_AUTH_MODE_SHA1);
468 		case CRYPTO_SHA2_256_HMAC:
469 			return (SCMD_AUTH_MODE_SHA256);
470 		case CRYPTO_SHA2_384_HMAC:
471 			return (SCMD_AUTH_MODE_SHA512_384);
472 		default:
473 			return (SCMD_AUTH_MODE_NOP);
474 		}
475 	case CRYPTO_AES_NIST_GCM_16:
476 		return (SCMD_AUTH_MODE_GHASH);
477 	default:
478 		return (SCMD_AUTH_MODE_NOP);
479 	}
480 }
481 
482 int
483 t4_tls_hmac_ctrl(const struct ktls_session *tls)
484 {
485 	switch (tls->params.cipher_algorithm) {
486 	case CRYPTO_AES_CBC:
487 		return (SCMD_HMAC_CTRL_NO_TRUNC);
488 	case CRYPTO_AES_NIST_GCM_16:
489 		return (SCMD_HMAC_CTRL_NOP);
490 	default:
491 		return (SCMD_HMAC_CTRL_NOP);
492 	}
493 }
494 
495 static int
496 tls_cipher_key_size(const struct ktls_session *tls)
497 {
498 	switch (tls->params.cipher_key_len) {
499 	case 128 / 8:
500 		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_128);
501 	case 192 / 8:
502 		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_192);
503 	case 256 / 8:
504 		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_256);
505 	default:
506 		__assert_unreachable();
507 	}
508 }
509 
510 static int
511 tls_mac_key_size(const struct ktls_session *tls)
512 {
513 	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
514 		return (CHCR_KEYCTX_MAC_KEY_SIZE_512);
515 	else {
516 		switch (tls->params.auth_algorithm) {
517 		case CRYPTO_SHA1_HMAC:
518 			return (CHCR_KEYCTX_MAC_KEY_SIZE_160);
519 		case CRYPTO_SHA2_256_HMAC:
520 			return (CHCR_KEYCTX_MAC_KEY_SIZE_256);
521 		case CRYPTO_SHA2_384_HMAC:
522 			return (CHCR_KEYCTX_MAC_KEY_SIZE_512);
523 		default:
524 			__assert_unreachable();
525 		}
526 	}
527 }
528 
529 void
530 t4_tls_key_ctx(const struct ktls_session *tls, int direction,
531     struct tls_keyctx *kctx)
532 {
533 	const struct auth_hash *axf;
534 	u_int mac_key_size;
535 	char *hash;
536 
537 	/* Key context header. */
538 	if (direction == KTLS_TX) {
539 		kctx->u.txhdr.ctxlen = t4_tls_key_info_size(tls) / 16;
540 		kctx->u.txhdr.dualck_to_txvalid =
541 		    V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1) |
542 		    V_TLS_KEYCTX_TX_WR_TXCK_SIZE(tls_cipher_key_size(tls)) |
543 		    V_TLS_KEYCTX_TX_WR_TXMK_SIZE(tls_mac_key_size(tls)) |
544 		    V_TLS_KEYCTX_TX_WR_TXVALID(1);
545 		if (tls->params.cipher_algorithm == CRYPTO_AES_CBC)
546 			kctx->u.txhdr.dualck_to_txvalid |=
547 			    V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1);
548 		kctx->u.txhdr.dualck_to_txvalid =
549 		    htobe16(kctx->u.txhdr.dualck_to_txvalid);
550 	} else {
551 		kctx->u.rxhdr.flitcnt_hmacctrl =
552 		    V_TLS_KEYCTX_TX_WR_FLITCNT(t4_tls_key_info_size(tls) / 16) |
553 		    V_TLS_KEYCTX_TX_WR_HMACCTRL(t4_tls_hmac_ctrl(tls));
554 
555 		kctx->u.rxhdr.protover_ciphmode =
556 		    V_TLS_KEYCTX_TX_WR_PROTOVER(t4_tls_proto_ver(tls)) |
557 		    V_TLS_KEYCTX_TX_WR_CIPHMODE(t4_tls_cipher_mode(tls));
558 
559 		kctx->u.rxhdr.authmode_to_rxvalid =
560 		    V_TLS_KEYCTX_TX_WR_AUTHMODE(t4_tls_auth_mode(tls)) |
561 		    V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(3) |
562 		    V_TLS_KEYCTX_TX_WR_RXVALID(1);
563 
564 		kctx->u.rxhdr.ivpresent_to_rxmk_size =
565 		    V_TLS_KEYCTX_TX_WR_IVPRESENT(0) |
566 		    V_TLS_KEYCTX_TX_WR_RXCK_SIZE(tls_cipher_key_size(tls)) |
567 		    V_TLS_KEYCTX_TX_WR_RXMK_SIZE(tls_mac_key_size(tls));
568 
569 		if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
570 			kctx->u.rxhdr.ivinsert_to_authinsrt =
571 			    htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) |
572 				V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) |
573 				V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) |
574 				V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(14ULL) |
575 				V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(16ULL) |
576 				V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(14ULL) |
577 				V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) |
578 				V_TLS_KEYCTX_TX_WR_AUTHINSRT(16ULL));
579 		} else {
580 			kctx->u.rxhdr.authmode_to_rxvalid |=
581 			    V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1);
582 			kctx->u.rxhdr.ivpresent_to_rxmk_size |=
583 			    V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1);
584 			kctx->u.rxhdr.ivinsert_to_authinsrt =
585 			    htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) |
586 				V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) |
587 				V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) |
588 				V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(22ULL) |
589 				V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(0ULL) |
590 				V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(22ULL) |
591 				V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) |
592 				V_TLS_KEYCTX_TX_WR_AUTHINSRT(0ULL));
593 		}
594 	}
595 
596 	/* Key. */
597 	if (direction == KTLS_RX &&
598 	    tls->params.cipher_algorithm == CRYPTO_AES_CBC)
599 		t4_aes_getdeckey(kctx->keys.edkey, tls->params.cipher_key,
600 		    tls->params.cipher_key_len * 8);
601 	else
602 		memcpy(kctx->keys.edkey, tls->params.cipher_key,
603 		    tls->params.cipher_key_len);
604 
605 	/* Auth state and implicit IV (salt). */
606 	hash = kctx->keys.edkey + tls->params.cipher_key_len;
607 	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
608 		_Static_assert(offsetof(struct tx_keyctx_hdr, txsalt) ==
609 		    offsetof(struct rx_keyctx_hdr, rxsalt),
610 		    "salt offset mismatch");
611 		memcpy(kctx->u.txhdr.txsalt, tls->params.iv, SALT_SIZE);
612 		t4_init_gmac_hash(tls->params.cipher_key,
613 		    tls->params.cipher_key_len, hash);
614 	} else {
615 		switch (tls->params.auth_algorithm) {
616 		case CRYPTO_SHA1_HMAC:
617 			axf = &auth_hash_hmac_sha1;
618 			mac_key_size = SHA1_HASH_LEN;
619 			break;
620 		case CRYPTO_SHA2_256_HMAC:
621 			axf = &auth_hash_hmac_sha2_256;
622 			mac_key_size = SHA2_256_HASH_LEN;
623 			break;
624 		case CRYPTO_SHA2_384_HMAC:
625 			axf = &auth_hash_hmac_sha2_384;
626 			mac_key_size = SHA2_512_HASH_LEN;
627 			break;
628 		default:
629 			__assert_unreachable();
630 		}
631 		t4_init_hmac_digest(axf, mac_key_size, tls->params.auth_key,
632 		    tls->params.auth_key_len, hash);
633 	}
634 }
635 
636 int
637 t4_alloc_tls_keyid(struct adapter *sc)
638 {
639 	vmem_addr_t addr;
640 
641 	if (sc->vres.key.size == 0)
642 		return (-1);
643 
644 	if (vmem_alloc(sc->key_map, TLS_KEY_CONTEXT_SZ, M_NOWAIT | M_FIRSTFIT,
645 	    &addr) != 0)
646 		return (-1);
647 
648 	return (addr);
649 }
650 
651 void
652 t4_free_tls_keyid(struct adapter *sc, int keyid)
653 {
654 	vmem_free(sc->key_map, keyid, TLS_KEY_CONTEXT_SZ);
655 }
656 
657 void
658 t4_write_tlskey_wr(const struct ktls_session *tls, int direction, int tid,
659     int flags, int keyid, struct tls_key_req *kwr)
660 {
661 	kwr->wr_hi = htobe32(V_FW_WR_OP(FW_ULPTX_WR) | F_FW_WR_ATOMIC | flags);
662 	kwr->wr_mid = htobe32(V_FW_WR_LEN16(DIV_ROUND_UP(TLS_KEY_WR_SZ, 16)) |
663 	    V_FW_WR_FLOWID(tid));
664 	kwr->protocol = t4_tls_proto_ver(tls);
665 	kwr->mfs = htobe16(tls->params.max_frame_len);
666 	kwr->reneg_to_write_rx = V_KEY_GET_LOC(direction == KTLS_TX ?
667 	    KEY_WRITE_TX : KEY_WRITE_RX);
668 
669 	/* master command */
670 	kwr->cmd = htobe32(V_ULPTX_CMD(ULP_TX_MEM_WRITE) |
671 	    V_T5_ULP_MEMIO_ORDER(1) | V_T5_ULP_MEMIO_IMM(1));
672 	kwr->dlen = htobe32(V_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5));
673 	kwr->len16 = htobe32((tid << 8) |
674 	    DIV_ROUND_UP(TLS_KEY_WR_SZ - sizeof(struct work_request_hdr), 16));
675 	kwr->kaddr = htobe32(V_ULP_MEMIO_ADDR(keyid >> 5));
676 
677 	/* sub command */
678 	kwr->sc_more = htobe32(V_ULPTX_CMD(ULP_TX_SC_IMM));
679 	kwr->sc_len = htobe32(TLS_KEY_CONTEXT_SZ);
680 }
681 #endif
682