/*-
 * Copyright (c) 2017-2019 Chelsio Communications, Inc.
 * All rights reserved.
 * Written by: John Baldwin <jhb@FreeBSD.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include "opt_kern_tls.h"

#include <sys/types.h>
#include <sys/ktls.h>
#include <sys/malloc.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/xform.h>

#include "common/common.h"
#include "crypto/t4_crypto.h"

/*
 * Crypto operations use a key context to store cipher keys and
 * partial hash digests.  They can either be passed inline as part of
 * a work request using crypto or they can be stored in card RAM.  For
 * the latter case, work requests must replace the inline key context
 * with a request to read the context from card RAM.
 *
 * The format of a key context:
 *
 * +-------------------------------+
 * | key context header            |
 * +-------------------------------+
 * | AES key                       |  ----- For requests with AES
 * +-------------------------------+
 * | Hash state                    |  ----- For hash-only requests
 * +-------------------------------+ -
 * | IPAD (16-byte aligned)        |  \
 * +-------------------------------+   +---- For requests with HMAC
 * | OPAD (16-byte aligned)        |  /
 * +-------------------------------+ -
 * | GMAC H                        |  ----- For AES-GCM
 * +-------------------------------+ -
 */

/* Fields in the key context header. */
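/*
 * The macros below follow the S_/M_/V_/G_/F_ convention used for
 * hardware register fields elsewhere in this driver: S_* is the bit
 * shift of a field, M_* its mask (after shifting), V_*(x) places a
 * value into the field, G_*(x) extracts it, and F_* is the pre-shifted
 * form of a single-bit flag.  Illustrative use only (the names exist in
 * this file, but this particular combination is made up):
 *
 *	hdr = V_TLS_KEYCTX_TX_WR_TXCK_SIZE(CHCR_KEYCTX_CIPHER_KEY_SIZE_128) |
 *	    F_TLS_KEYCTX_TX_WR_TXVALID;
 *	ck_size = G_TLS_KEYCTX_TX_WR_TXCK_SIZE(hdr);
 */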
#define S_TLS_KEYCTX_TX_WR_DUALCK	12
#define M_TLS_KEYCTX_TX_WR_DUALCK	0x1
#define V_TLS_KEYCTX_TX_WR_DUALCK(x)	((x) << S_TLS_KEYCTX_TX_WR_DUALCK)
#define G_TLS_KEYCTX_TX_WR_DUALCK(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_DUALCK) & M_TLS_KEYCTX_TX_WR_DUALCK)
#define F_TLS_KEYCTX_TX_WR_DUALCK	V_TLS_KEYCTX_TX_WR_DUALCK(1U)

#define S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT	11
#define M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT	0x1
#define V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT)
#define G_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT) & \
	M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT)
#define F_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT	\
	V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_SALT_PRESENT	10
#define M_TLS_KEYCTX_TX_WR_SALT_PRESENT	0x1
#define V_TLS_KEYCTX_TX_WR_SALT_PRESENT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_SALT_PRESENT)
#define G_TLS_KEYCTX_TX_WR_SALT_PRESENT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_SALT_PRESENT) & \
	M_TLS_KEYCTX_TX_WR_SALT_PRESENT)
#define F_TLS_KEYCTX_TX_WR_SALT_PRESENT	\
	V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_TXCK_SIZE	6
#define M_TLS_KEYCTX_TX_WR_TXCK_SIZE	0xf
#define V_TLS_KEYCTX_TX_WR_TXCK_SIZE(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_TXCK_SIZE)
#define G_TLS_KEYCTX_TX_WR_TXCK_SIZE(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_TXCK_SIZE) & \
	M_TLS_KEYCTX_TX_WR_TXCK_SIZE)

#define S_TLS_KEYCTX_TX_WR_TXMK_SIZE	2
#define M_TLS_KEYCTX_TX_WR_TXMK_SIZE	0xf
#define V_TLS_KEYCTX_TX_WR_TXMK_SIZE(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_TXMK_SIZE)
#define G_TLS_KEYCTX_TX_WR_TXMK_SIZE(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_TXMK_SIZE) & \
	M_TLS_KEYCTX_TX_WR_TXMK_SIZE)

#define S_TLS_KEYCTX_TX_WR_TXVALID	0
#define M_TLS_KEYCTX_TX_WR_TXVALID	0x1
#define V_TLS_KEYCTX_TX_WR_TXVALID(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_TXVALID)
#define G_TLS_KEYCTX_TX_WR_TXVALID(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_TXVALID) & M_TLS_KEYCTX_TX_WR_TXVALID)
#define F_TLS_KEYCTX_TX_WR_TXVALID	V_TLS_KEYCTX_TX_WR_TXVALID(1U)

#define S_TLS_KEYCTX_TX_WR_FLITCNT	3
#define M_TLS_KEYCTX_TX_WR_FLITCNT	0x1f
#define V_TLS_KEYCTX_TX_WR_FLITCNT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_FLITCNT)
#define G_TLS_KEYCTX_TX_WR_FLITCNT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_FLITCNT) & M_TLS_KEYCTX_TX_WR_FLITCNT)

#define S_TLS_KEYCTX_TX_WR_HMACCTRL	0
#define M_TLS_KEYCTX_TX_WR_HMACCTRL	0x7
#define V_TLS_KEYCTX_TX_WR_HMACCTRL(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_HMACCTRL)
#define G_TLS_KEYCTX_TX_WR_HMACCTRL(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_HMACCTRL) & M_TLS_KEYCTX_TX_WR_HMACCTRL)

#define S_TLS_KEYCTX_TX_WR_PROTOVER	4
#define M_TLS_KEYCTX_TX_WR_PROTOVER	0xf
#define V_TLS_KEYCTX_TX_WR_PROTOVER(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_PROTOVER)
#define G_TLS_KEYCTX_TX_WR_PROTOVER(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_PROTOVER) & M_TLS_KEYCTX_TX_WR_PROTOVER)

#define S_TLS_KEYCTX_TX_WR_CIPHMODE	0
#define M_TLS_KEYCTX_TX_WR_CIPHMODE	0xf
#define V_TLS_KEYCTX_TX_WR_CIPHMODE(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_CIPHMODE)
#define G_TLS_KEYCTX_TX_WR_CIPHMODE(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_CIPHMODE) & M_TLS_KEYCTX_TX_WR_CIPHMODE)

#define S_TLS_KEYCTX_TX_WR_AUTHMODE	4
#define M_TLS_KEYCTX_TX_WR_AUTHMODE	0xf
#define V_TLS_KEYCTX_TX_WR_AUTHMODE(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_AUTHMODE)
#define G_TLS_KEYCTX_TX_WR_AUTHMODE(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_AUTHMODE) & M_TLS_KEYCTX_TX_WR_AUTHMODE)

#define S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL	3
#define M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL	0x1
#define V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL)
#define G_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL) & \
	M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL)
#define F_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL	\
	V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1U)

#define S_TLS_KEYCTX_TX_WR_SEQNUMCTRL	1
#define M_TLS_KEYCTX_TX_WR_SEQNUMCTRL	0x3
#define V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_SEQNUMCTRL)
#define G_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_SEQNUMCTRL) & \
	M_TLS_KEYCTX_TX_WR_SEQNUMCTRL)

#define S_TLS_KEYCTX_TX_WR_RXVALID	0
#define M_TLS_KEYCTX_TX_WR_RXVALID	0x1
#define V_TLS_KEYCTX_TX_WR_RXVALID(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_RXVALID)
#define G_TLS_KEYCTX_TX_WR_RXVALID(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_RXVALID) & M_TLS_KEYCTX_TX_WR_RXVALID)
#define F_TLS_KEYCTX_TX_WR_RXVALID	V_TLS_KEYCTX_TX_WR_RXVALID(1U)

#define S_TLS_KEYCTX_TX_WR_IVPRESENT	7
#define M_TLS_KEYCTX_TX_WR_IVPRESENT	0x1
#define V_TLS_KEYCTX_TX_WR_IVPRESENT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_IVPRESENT)
#define G_TLS_KEYCTX_TX_WR_IVPRESENT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_IVPRESENT) & \
	M_TLS_KEYCTX_TX_WR_IVPRESENT)
#define F_TLS_KEYCTX_TX_WR_IVPRESENT	V_TLS_KEYCTX_TX_WR_IVPRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT	6
#define M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT	0x1
#define V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT)
#define G_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT) & \
	M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT)
#define F_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT	\
	V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1U)

#define S_TLS_KEYCTX_TX_WR_RXCK_SIZE	3
#define M_TLS_KEYCTX_TX_WR_RXCK_SIZE	0x7
#define V_TLS_KEYCTX_TX_WR_RXCK_SIZE(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_RXCK_SIZE)
#define G_TLS_KEYCTX_TX_WR_RXCK_SIZE(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_RXCK_SIZE) & \
	M_TLS_KEYCTX_TX_WR_RXCK_SIZE)

#define S_TLS_KEYCTX_TX_WR_RXMK_SIZE	0
#define M_TLS_KEYCTX_TX_WR_RXMK_SIZE	0x7
#define V_TLS_KEYCTX_TX_WR_RXMK_SIZE(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_RXMK_SIZE)
#define G_TLS_KEYCTX_TX_WR_RXMK_SIZE(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_RXMK_SIZE) & \
	M_TLS_KEYCTX_TX_WR_RXMK_SIZE)

#define S_TLS_KEYCTX_TX_WR_IVINSERT	55
#define M_TLS_KEYCTX_TX_WR_IVINSERT	0x1ffULL
#define V_TLS_KEYCTX_TX_WR_IVINSERT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_IVINSERT)
#define G_TLS_KEYCTX_TX_WR_IVINSERT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_IVINSERT) & M_TLS_KEYCTX_TX_WR_IVINSERT)

#define S_TLS_KEYCTX_TX_WR_AADSTRTOFST	47
#define M_TLS_KEYCTX_TX_WR_AADSTRTOFST	0xffULL
#define V_TLS_KEYCTX_TX_WR_AADSTRTOFST(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_AADSTRTOFST)
#define G_TLS_KEYCTX_TX_WR_AADSTRTOFST(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_AADSTRTOFST) & \
	M_TLS_KEYCTX_TX_WR_AADSTRTOFST)

#define S_TLS_KEYCTX_TX_WR_AADSTOPOFST	39
#define M_TLS_KEYCTX_TX_WR_AADSTOPOFST	0xffULL
#define V_TLS_KEYCTX_TX_WR_AADSTOPOFST(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_AADSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_AADSTOPOFST(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_AADSTOPOFST) & \
	M_TLS_KEYCTX_TX_WR_AADSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST	30
#define M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST	0x1ffULL
#define V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST)
#define G_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST) & \
	M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST)

#define S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST	23
#define M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST	0x7f
#define V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST) & \
	M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHSRTOFST	14
#define M_TLS_KEYCTX_TX_WR_AUTHSRTOFST	0x1ff
#define V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_AUTHSRTOFST)
#define G_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_AUTHSRTOFST) & \
	M_TLS_KEYCTX_TX_WR_AUTHSRTOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST	7
#define M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST	0x7f
#define V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST)
#define G_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST) & \
	M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST)

#define S_TLS_KEYCTX_TX_WR_AUTHINSRT	0
#define M_TLS_KEYCTX_TX_WR_AUTHINSRT	0x7f
#define V_TLS_KEYCTX_TX_WR_AUTHINSRT(x)	\
	((x) << S_TLS_KEYCTX_TX_WR_AUTHINSRT)
#define G_TLS_KEYCTX_TX_WR_AUTHINSRT(x)	\
	(((x) >> S_TLS_KEYCTX_TX_WR_AUTHINSRT) & \
	M_TLS_KEYCTX_TX_WR_AUTHINSRT)

/* Key Context Programming Operation type */
#define KEY_WRITE_RX	0x1
#define KEY_WRITE_TX	0x2
#define KEY_DELETE_RX	0x4
#define KEY_DELETE_TX	0x8

#define S_KEY_CLR_LOC	4
#define M_KEY_CLR_LOC	0xf
#define V_KEY_CLR_LOC(x)	((x) << S_KEY_CLR_LOC)
#define G_KEY_CLR_LOC(x)	(((x) >> S_KEY_CLR_LOC) & M_KEY_CLR_LOC)
#define F_KEY_CLR_LOC	V_KEY_CLR_LOC(1U)

#define S_KEY_GET_LOC	0
#define M_KEY_GET_LOC	0xf
#define V_KEY_GET_LOC(x)	((x) << S_KEY_GET_LOC)
#define G_KEY_GET_LOC(x)	(((x) >> S_KEY_GET_LOC) & M_KEY_GET_LOC)

/*
 * Generate the initial GMAC hash state for an AES-GCM key.
 *
 * Borrowed from AES_GMAC_Setkey().
 */
void
t4_init_gmac_hash(const char *key, int klen, char *ghash)
{
	static char zeroes[GMAC_BLOCK_LEN];
	uint32_t keysched[4 * (RIJNDAEL_MAXNR + 1)];
	int rounds;

	rounds = rijndaelKeySetupEnc(keysched, key, klen * 8);
	rijndaelEncrypt(keysched, rounds, zeroes, ghash);
	explicit_bzero(keysched, sizeof(keysched));
}
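/*
 * The value computed above is the GHASH subkey H, i.e. the AES
 * encryption of the all-zeroes block under the session key; it is what
 * gets placed in the "GMAC H" slot of the key context layout described
 * at the top of this file.
 */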
/* Copy out the partial hash state from a software hash implementation. */
void
t4_copy_partial_hash(int alg, union authctx *auth_ctx, void *dst)
{
	uint32_t *u32;
	uint64_t *u64;
	u_int i;

	u32 = (uint32_t *)dst;
	u64 = (uint64_t *)dst;
	switch (alg) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA1_HMAC:
		for (i = 0; i < SHA1_HASH_LEN / 4; i++)
			u32[i] = htobe32(auth_ctx->sha1ctx.h.b32[i]);
		break;
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_224_HMAC:
		for (i = 0; i < SHA2_256_HASH_LEN / 4; i++)
			u32[i] = htobe32(auth_ctx->sha224ctx.state[i]);
		break;
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_256_HMAC:
		for (i = 0; i < SHA2_256_HASH_LEN / 4; i++)
			u32[i] = htobe32(auth_ctx->sha256ctx.state[i]);
		break;
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_384_HMAC:
		for (i = 0; i < SHA2_512_HASH_LEN / 8; i++)
			u64[i] = htobe64(auth_ctx->sha384ctx.state[i]);
		break;
	case CRYPTO_SHA2_512:
	case CRYPTO_SHA2_512_HMAC:
		for (i = 0; i < SHA2_512_HASH_LEN / 8; i++)
			u64[i] = htobe64(auth_ctx->sha512ctx.state[i]);
		break;
	}
}

void
t4_init_hmac_digest(const struct auth_hash *axf, u_int partial_digest_len,
    const char *key, int klen, char *dst)
{
	union authctx auth_ctx;

	hmac_init_ipad(axf, key, klen, &auth_ctx);
	t4_copy_partial_hash(axf->type, &auth_ctx, dst);

	dst += roundup2(partial_digest_len, 16);

	hmac_init_opad(axf, key, klen, &auth_ctx);
	t4_copy_partial_hash(axf->type, &auth_ctx, dst);

	explicit_bzero(&auth_ctx, sizeof(auth_ctx));
}

/*
 * Borrowed from cesa_prep_aes_key().
 *
 * NB: The crypto engine wants the words in the decryption key in reverse
 * order.
 */
void
t4_aes_getdeckey(void *dec_key, const void *enc_key, unsigned int kbits)
{
	uint32_t ek[4 * (RIJNDAEL_MAXNR + 1)];
	uint32_t *dkey;
	int i;

	rijndaelKeySetupEnc(ek, enc_key, kbits);
	dkey = dec_key;
	dkey += (kbits / 8) / 4;

	switch (kbits) {
	case 128:
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 10 + i]);
		break;
	case 192:
		for (i = 0; i < 2; i++)
			*--dkey = htobe32(ek[4 * 11 + 2 + i]);
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 12 + i]);
		break;
	case 256:
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 13 + i]);
		for (i = 0; i < 4; i++)
			*--dkey = htobe32(ek[4 * 14 + i]);
		break;
	}
	MPASS(dkey == dec_key);
	explicit_bzero(ek, sizeof(ek));
}

#ifdef KERN_TLS
/*
 * - keyid management
 * - request to program key?
 */
u_int
t4_tls_key_info_size(const struct ktls_session *tls)
{
	u_int key_info_size, mac_key_size;

	key_info_size = sizeof(struct tx_keyctx_hdr) +
	    tls->params.cipher_key_len;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		key_info_size += GMAC_BLOCK_LEN;
	} else {
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			mac_key_size = SHA1_HASH_LEN;
			break;
		case CRYPTO_SHA2_256_HMAC:
			mac_key_size = SHA2_256_HASH_LEN;
			break;
		case CRYPTO_SHA2_384_HMAC:
			mac_key_size = SHA2_512_HASH_LEN;
			break;
		default:
			__assert_unreachable();
		}
		key_info_size += roundup2(mac_key_size, 16) * 2;
	}
	return (key_info_size);
}

int
t4_tls_proto_ver(const struct ktls_session *tls)
{
	if (tls->params.tls_vminor == TLS_MINOR_VER_ONE)
		return (SCMD_PROTO_VERSION_TLS_1_1);
	else
		return (SCMD_PROTO_VERSION_TLS_1_2);
}

int
t4_tls_cipher_mode(const struct ktls_session *tls)
{
	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_CBC:
		return (SCMD_CIPH_MODE_AES_CBC);
	case CRYPTO_AES_NIST_GCM_16:
		return (SCMD_CIPH_MODE_AES_GCM);
	default:
		return (SCMD_CIPH_MODE_NOP);
	}
}

int
t4_tls_auth_mode(const struct ktls_session *tls)
{
	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_CBC:
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			return (SCMD_AUTH_MODE_SHA1);
		case CRYPTO_SHA2_256_HMAC:
			return (SCMD_AUTH_MODE_SHA256);
		case CRYPTO_SHA2_384_HMAC:
			return (SCMD_AUTH_MODE_SHA512_384);
		default:
			return (SCMD_AUTH_MODE_NOP);
		}
	case CRYPTO_AES_NIST_GCM_16:
		return (SCMD_AUTH_MODE_GHASH);
	default:
		return (SCMD_AUTH_MODE_NOP);
	}
}

int
t4_tls_hmac_ctrl(const struct ktls_session *tls)
{
	switch (tls->params.cipher_algorithm) {
	case CRYPTO_AES_CBC:
		return (SCMD_HMAC_CTRL_NO_TRUNC);
	case CRYPTO_AES_NIST_GCM_16:
		return (SCMD_HMAC_CTRL_NOP);
	default:
		return (SCMD_HMAC_CTRL_NOP);
	}
}

static int
tls_cipher_key_size(const struct ktls_session *tls)
{
	switch (tls->params.cipher_key_len) {
	case 128 / 8:
		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_128);
	case 192 / 8:
		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_192);
	case 256 / 8:
		return (CHCR_KEYCTX_CIPHER_KEY_SIZE_256);
	default:
		__assert_unreachable();
	}
}

static int
tls_mac_key_size(const struct ktls_session *tls)
{
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16)
		return (CHCR_KEYCTX_MAC_KEY_SIZE_512);
	else {
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			return (CHCR_KEYCTX_MAC_KEY_SIZE_160);
		case CRYPTO_SHA2_256_HMAC:
			return (CHCR_KEYCTX_MAC_KEY_SIZE_256);
		case CRYPTO_SHA2_384_HMAC:
			return (CHCR_KEYCTX_MAC_KEY_SIZE_512);
		default:
			__assert_unreachable();
		}
	}
}
void
t4_tls_key_ctx(const struct ktls_session *tls, int direction,
    struct tls_keyctx *kctx)
{
	const struct auth_hash *axf;
	u_int mac_key_size;
	char *hash;

	/* Key context header. */
	if (direction == KTLS_TX) {
		kctx->u.txhdr.ctxlen = t4_tls_key_info_size(tls) / 16;
		kctx->u.txhdr.dualck_to_txvalid =
		    V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1) |
		    V_TLS_KEYCTX_TX_WR_TXCK_SIZE(tls_cipher_key_size(tls)) |
		    V_TLS_KEYCTX_TX_WR_TXMK_SIZE(tls_mac_key_size(tls)) |
		    V_TLS_KEYCTX_TX_WR_TXVALID(1);
		if (tls->params.cipher_algorithm == CRYPTO_AES_CBC)
			kctx->u.txhdr.dualck_to_txvalid |=
			    V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1);
		kctx->u.txhdr.dualck_to_txvalid =
		    htobe16(kctx->u.txhdr.dualck_to_txvalid);
	} else {
		kctx->u.rxhdr.flitcnt_hmacctrl =
		    V_TLS_KEYCTX_TX_WR_FLITCNT(t4_tls_key_info_size(tls) / 16) |
		    V_TLS_KEYCTX_TX_WR_HMACCTRL(t4_tls_hmac_ctrl(tls));

		kctx->u.rxhdr.protover_ciphmode =
		    V_TLS_KEYCTX_TX_WR_PROTOVER(t4_tls_proto_ver(tls)) |
		    V_TLS_KEYCTX_TX_WR_CIPHMODE(t4_tls_cipher_mode(tls));

		kctx->u.rxhdr.authmode_to_rxvalid =
		    V_TLS_KEYCTX_TX_WR_AUTHMODE(t4_tls_auth_mode(tls)) |
		    V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(3) |
		    V_TLS_KEYCTX_TX_WR_RXVALID(1);

		kctx->u.rxhdr.ivpresent_to_rxmk_size =
		    V_TLS_KEYCTX_TX_WR_IVPRESENT(0) |
		    V_TLS_KEYCTX_TX_WR_RXCK_SIZE(tls_cipher_key_size(tls)) |
		    V_TLS_KEYCTX_TX_WR_RXMK_SIZE(tls_mac_key_size(tls));

		if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
			kctx->u.rxhdr.ivinsert_to_authinsrt =
			    htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(14ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(16ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(14ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHINSRT(16ULL));
		} else {
			kctx->u.rxhdr.authmode_to_rxvalid |=
			    V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1);
			kctx->u.rxhdr.ivpresent_to_rxmk_size |=
			    V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1);
			kctx->u.rxhdr.ivinsert_to_authinsrt =
			    htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) |
				V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(22ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(0ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(22ULL) |
				V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) |
				V_TLS_KEYCTX_TX_WR_AUTHINSRT(0ULL));
		}
	}

	/* Key. */
	if (direction == KTLS_RX &&
	    tls->params.cipher_algorithm == CRYPTO_AES_CBC)
		t4_aes_getdeckey(kctx->keys.edkey, tls->params.cipher_key,
		    tls->params.cipher_key_len * 8);
	else
		memcpy(kctx->keys.edkey, tls->params.cipher_key,
		    tls->params.cipher_key_len);
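	/*
	 * AES-GCM runs the block cipher only in the forward (encrypt)
	 * direction for counter mode, so the raw key is used for both
	 * TX and RX; only AES-CBC receive needs the expanded decryption
	 * key prepared by t4_aes_getdeckey() above.
	 */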
	/* Auth state and implicit IV (salt). */
	hash = kctx->keys.edkey + tls->params.cipher_key_len;
	if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) {
		_Static_assert(offsetof(struct tx_keyctx_hdr, txsalt) ==
		    offsetof(struct rx_keyctx_hdr, rxsalt),
		    "salt offset mismatch");
		memcpy(kctx->u.txhdr.txsalt, tls->params.iv, SALT_SIZE);
		t4_init_gmac_hash(tls->params.cipher_key,
		    tls->params.cipher_key_len, hash);
	} else {
		switch (tls->params.auth_algorithm) {
		case CRYPTO_SHA1_HMAC:
			axf = &auth_hash_hmac_sha1;
			mac_key_size = SHA1_HASH_LEN;
			break;
		case CRYPTO_SHA2_256_HMAC:
			axf = &auth_hash_hmac_sha2_256;
			mac_key_size = SHA2_256_HASH_LEN;
			break;
		case CRYPTO_SHA2_384_HMAC:
			axf = &auth_hash_hmac_sha2_384;
			mac_key_size = SHA2_512_HASH_LEN;
			break;
		default:
			__assert_unreachable();
		}
		t4_init_hmac_digest(axf, mac_key_size, tls->params.auth_key,
		    tls->params.auth_key_len, hash);
	}
}

int
t4_alloc_tls_keyid(struct adapter *sc)
{
	vmem_addr_t addr;

	if (sc->vres.key.size == 0)
		return (-1);

	if (vmem_alloc(sc->key_map, TLS_KEY_CONTEXT_SZ, M_NOWAIT | M_FIRSTFIT,
	    &addr) != 0)
		return (-1);

	return (addr);
}

void
t4_free_tls_keyid(struct adapter *sc, int keyid)
{
	vmem_free(sc->key_map, keyid, TLS_KEY_CONTEXT_SZ);
}

void
t4_write_tlskey_wr(const struct ktls_session *tls, int direction, int tid,
    int flags, int keyid, struct tls_key_req *kwr)
{
	kwr->wr_hi = htobe32(V_FW_WR_OP(FW_ULPTX_WR) | F_FW_WR_ATOMIC | flags);
	kwr->wr_mid = htobe32(V_FW_WR_LEN16(DIV_ROUND_UP(TLS_KEY_WR_SZ, 16)) |
	    V_FW_WR_FLOWID(tid));
	kwr->protocol = t4_tls_proto_ver(tls);
	kwr->mfs = htobe16(tls->params.max_frame_len);
	kwr->reneg_to_write_rx = V_KEY_GET_LOC(direction == KTLS_TX ?
	    KEY_WRITE_TX : KEY_WRITE_RX);

	/* master command */
	kwr->cmd = htobe32(V_ULPTX_CMD(ULP_TX_MEM_WRITE) |
	    V_T5_ULP_MEMIO_ORDER(1) | V_T5_ULP_MEMIO_IMM(1));
	kwr->dlen = htobe32(V_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5));
	kwr->len16 = htobe32((tid << 8) |
	    DIV_ROUND_UP(TLS_KEY_WR_SZ - sizeof(struct work_request_hdr), 16));
	kwr->kaddr = htobe32(V_ULP_MEMIO_ADDR(keyid >> 5));

	/* sub command */
	kwr->sc_more = htobe32(V_ULPTX_CMD(ULP_TX_SC_IMM));
	kwr->sc_len = htobe32(TLS_KEY_CONTEXT_SZ);
}
#endif
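/*
 * Usage sketch (illustrative only; sc, tls, tid, and kwr come from the
 * caller's context, and the actual KTLS callers elsewhere in the driver
 * also append the key context as immediate data after the work request
 * and handle the case where no key memory is available):
 *
 *	struct tls_keyctx kctx;
 *	int keyid;
 *
 *	keyid = t4_alloc_tls_keyid(sc);
 *	if (keyid < 0)
 *		return (ENOMEM);
 *	t4_tls_key_ctx(tls, KTLS_TX, &kctx);
 *	t4_write_tlskey_wr(tls, KTLS_TX, tid, 0, keyid, kwr);
 *	memcpy(kwr + 1, &kctx, sizeof(kctx));
 *	...
 *	t4_free_tls_keyid(sc, keyid);
 */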