1 /*- 2 * Copyright (c) 2017-2019 Chelsio Communications, Inc. 3 * All rights reserved. 4 * Written by: John Baldwin <jhb@FreeBSD.org> 5 * 6 * Redistribution and use in source and binary forms, with or without 7 * modification, are permitted provided that the following conditions 8 * are met: 9 * 1. Redistributions of source code must retain the above copyright 10 * notice, this list of conditions and the following disclaimer. 11 * 2. Redistributions in binary form must reproduce the above copyright 12 * notice, this list of conditions and the following disclaimer in the 13 * documentation and/or other materials provided with the distribution. 14 * 15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 25 * SUCH DAMAGE. 26 */ 27 28 #include "opt_kern_tls.h" 29 30 #include <sys/types.h> 31 #include <sys/ktls.h> 32 #include <sys/malloc.h> 33 34 #include <opencrypto/cryptodev.h> 35 #include <opencrypto/xform.h> 36 37 #include "common/common.h" 38 #include "crypto/t4_crypto.h" 39 40 /* 41 * Crypto operations use a key context to store cipher keys and 42 * partial hash digests. They can either be passed inline as part of 43 * a work request using crypto or they can be stored in card RAM. For 44 * the latter case, work requests must replace the inline key context 45 * with a request to read the context from card RAM. 46 * 47 * The format of a key context: 48 * 49 * +-------------------------------+ 50 * | key context header | 51 * +-------------------------------+ 52 * | AES key | ----- For requests with AES 53 * +-------------------------------+ 54 * | Hash state | ----- For hash-only requests 55 * +-------------------------------+ - 56 * | IPAD (16-byte aligned) | \ 57 * +-------------------------------+ +---- For requests with HMAC 58 * | OPAD (16-byte aligned) | / 59 * +-------------------------------+ - 60 * | GMAC H | ----- For AES-GCM 61 * +-------------------------------+ - 62 */ 63 64 /* Fields in the key context header. */ 65 #define S_TLS_KEYCTX_TX_WR_DUALCK 12 66 #define M_TLS_KEYCTX_TX_WR_DUALCK 0x1 67 #define V_TLS_KEYCTX_TX_WR_DUALCK(x) ((x) << S_TLS_KEYCTX_TX_WR_DUALCK) 68 #define G_TLS_KEYCTX_TX_WR_DUALCK(x) \ 69 (((x) >> S_TLS_KEYCTX_TX_WR_DUALCK) & M_TLS_KEYCTX_TX_WR_DUALCK) 70 #define F_TLS_KEYCTX_TX_WR_DUALCK V_TLS_KEYCTX_TX_WR_DUALCK(1U) 71 72 #define S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT 11 73 #define M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT 0x1 74 #define V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x) \ 75 ((x) << S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT) 76 #define G_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(x) \ 77 (((x) >> S_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT) & \ 78 M_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT) 79 #define F_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT \ 80 V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1U) 81 82 #define S_TLS_KEYCTX_TX_WR_SALT_PRESENT 10 83 #define M_TLS_KEYCTX_TX_WR_SALT_PRESENT 0x1 84 #define V_TLS_KEYCTX_TX_WR_SALT_PRESENT(x) \ 85 ((x) << S_TLS_KEYCTX_TX_WR_SALT_PRESENT) 86 #define G_TLS_KEYCTX_TX_WR_SALT_PRESENT(x) \ 87 (((x) >> S_TLS_KEYCTX_TX_WR_SALT_PRESENT) & \ 88 M_TLS_KEYCTX_TX_WR_SALT_PRESENT) 89 #define F_TLS_KEYCTX_TX_WR_SALT_PRESENT \ 90 V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1U) 91 92 #define S_TLS_KEYCTX_TX_WR_TXCK_SIZE 6 93 #define M_TLS_KEYCTX_TX_WR_TXCK_SIZE 0xf 94 #define V_TLS_KEYCTX_TX_WR_TXCK_SIZE(x) \ 95 ((x) << S_TLS_KEYCTX_TX_WR_TXCK_SIZE) 96 #define G_TLS_KEYCTX_TX_WR_TXCK_SIZE(x) \ 97 (((x) >> S_TLS_KEYCTX_TX_WR_TXCK_SIZE) & \ 98 M_TLS_KEYCTX_TX_WR_TXCK_SIZE) 99 100 #define S_TLS_KEYCTX_TX_WR_TXMK_SIZE 2 101 #define M_TLS_KEYCTX_TX_WR_TXMK_SIZE 0xf 102 #define V_TLS_KEYCTX_TX_WR_TXMK_SIZE(x) \ 103 ((x) << S_TLS_KEYCTX_TX_WR_TXMK_SIZE) 104 #define G_TLS_KEYCTX_TX_WR_TXMK_SIZE(x) \ 105 (((x) >> S_TLS_KEYCTX_TX_WR_TXMK_SIZE) & \ 106 M_TLS_KEYCTX_TX_WR_TXMK_SIZE) 107 108 #define S_TLS_KEYCTX_TX_WR_TXVALID 0 109 #define M_TLS_KEYCTX_TX_WR_TXVALID 0x1 110 #define V_TLS_KEYCTX_TX_WR_TXVALID(x) \ 111 ((x) << S_TLS_KEYCTX_TX_WR_TXVALID) 112 #define G_TLS_KEYCTX_TX_WR_TXVALID(x) \ 113 (((x) >> S_TLS_KEYCTX_TX_WR_TXVALID) & M_TLS_KEYCTX_TX_WR_TXVALID) 114 #define F_TLS_KEYCTX_TX_WR_TXVALID V_TLS_KEYCTX_TX_WR_TXVALID(1U) 115 116 #define S_TLS_KEYCTX_TX_WR_FLITCNT 3 117 #define M_TLS_KEYCTX_TX_WR_FLITCNT 0x1f 118 #define V_TLS_KEYCTX_TX_WR_FLITCNT(x) \ 119 ((x) << S_TLS_KEYCTX_TX_WR_FLITCNT) 120 #define G_TLS_KEYCTX_TX_WR_FLITCNT(x) \ 121 (((x) >> S_TLS_KEYCTX_TX_WR_FLITCNT) & M_TLS_KEYCTX_TX_WR_FLITCNT) 122 123 #define S_TLS_KEYCTX_TX_WR_HMACCTRL 0 124 #define M_TLS_KEYCTX_TX_WR_HMACCTRL 0x7 125 #define V_TLS_KEYCTX_TX_WR_HMACCTRL(x) \ 126 ((x) << S_TLS_KEYCTX_TX_WR_HMACCTRL) 127 #define G_TLS_KEYCTX_TX_WR_HMACCTRL(x) \ 128 (((x) >> S_TLS_KEYCTX_TX_WR_HMACCTRL) & M_TLS_KEYCTX_TX_WR_HMACCTRL) 129 130 #define S_TLS_KEYCTX_TX_WR_PROTOVER 4 131 #define M_TLS_KEYCTX_TX_WR_PROTOVER 0xf 132 #define V_TLS_KEYCTX_TX_WR_PROTOVER(x) \ 133 ((x) << S_TLS_KEYCTX_TX_WR_PROTOVER) 134 #define G_TLS_KEYCTX_TX_WR_PROTOVER(x) \ 135 (((x) >> S_TLS_KEYCTX_TX_WR_PROTOVER) & M_TLS_KEYCTX_TX_WR_PROTOVER) 136 137 #define S_TLS_KEYCTX_TX_WR_CIPHMODE 0 138 #define M_TLS_KEYCTX_TX_WR_CIPHMODE 0xf 139 #define V_TLS_KEYCTX_TX_WR_CIPHMODE(x) \ 140 ((x) << S_TLS_KEYCTX_TX_WR_CIPHMODE) 141 #define G_TLS_KEYCTX_TX_WR_CIPHMODE(x) \ 142 (((x) >> S_TLS_KEYCTX_TX_WR_CIPHMODE) & M_TLS_KEYCTX_TX_WR_CIPHMODE) 143 144 #define S_TLS_KEYCTX_TX_WR_AUTHMODE 4 145 #define M_TLS_KEYCTX_TX_WR_AUTHMODE 0xf 146 #define V_TLS_KEYCTX_TX_WR_AUTHMODE(x) \ 147 ((x) << S_TLS_KEYCTX_TX_WR_AUTHMODE) 148 #define G_TLS_KEYCTX_TX_WR_AUTHMODE(x) \ 149 (((x) >> S_TLS_KEYCTX_TX_WR_AUTHMODE) & M_TLS_KEYCTX_TX_WR_AUTHMODE) 150 151 #define S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL 3 152 #define M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL 0x1 153 #define V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x) \ 154 ((x) << S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL) 155 #define G_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(x) \ 156 (((x) >> S_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL) & \ 157 M_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL) 158 #define F_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL \ 159 V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1U) 160 161 #define S_TLS_KEYCTX_TX_WR_SEQNUMCTRL 1 162 #define M_TLS_KEYCTX_TX_WR_SEQNUMCTRL 0x3 163 #define V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x) \ 164 ((x) << S_TLS_KEYCTX_TX_WR_SEQNUMCTRL) 165 #define G_TLS_KEYCTX_TX_WR_SEQNUMCTRL(x) \ 166 (((x) >> S_TLS_KEYCTX_TX_WR_SEQNUMCTRL) & \ 167 M_TLS_KEYCTX_TX_WR_SEQNUMCTRL) 168 169 #define S_TLS_KEYCTX_TX_WR_RXVALID 0 170 #define M_TLS_KEYCTX_TX_WR_RXVALID 0x1 171 #define V_TLS_KEYCTX_TX_WR_RXVALID(x) \ 172 ((x) << S_TLS_KEYCTX_TX_WR_RXVALID) 173 #define G_TLS_KEYCTX_TX_WR_RXVALID(x) \ 174 (((x) >> S_TLS_KEYCTX_TX_WR_RXVALID) & M_TLS_KEYCTX_TX_WR_RXVALID) 175 #define F_TLS_KEYCTX_TX_WR_RXVALID V_TLS_KEYCTX_TX_WR_RXVALID(1U) 176 177 #define S_TLS_KEYCTX_TX_WR_IVPRESENT 7 178 #define M_TLS_KEYCTX_TX_WR_IVPRESENT 0x1 179 #define V_TLS_KEYCTX_TX_WR_IVPRESENT(x) \ 180 ((x) << S_TLS_KEYCTX_TX_WR_IVPRESENT) 181 #define G_TLS_KEYCTX_TX_WR_IVPRESENT(x) \ 182 (((x) >> S_TLS_KEYCTX_TX_WR_IVPRESENT) & \ 183 M_TLS_KEYCTX_TX_WR_IVPRESENT) 184 #define F_TLS_KEYCTX_TX_WR_IVPRESENT V_TLS_KEYCTX_TX_WR_IVPRESENT(1U) 185 186 #define S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT 6 187 #define M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT 0x1 188 #define V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x) \ 189 ((x) << S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT) 190 #define G_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(x) \ 191 (((x) >> S_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT) & \ 192 M_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT) 193 #define F_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT \ 194 V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1U) 195 196 #define S_TLS_KEYCTX_TX_WR_RXCK_SIZE 3 197 #define M_TLS_KEYCTX_TX_WR_RXCK_SIZE 0x7 198 #define V_TLS_KEYCTX_TX_WR_RXCK_SIZE(x) \ 199 ((x) << S_TLS_KEYCTX_TX_WR_RXCK_SIZE) 200 #define G_TLS_KEYCTX_TX_WR_RXCK_SIZE(x) \ 201 (((x) >> S_TLS_KEYCTX_TX_WR_RXCK_SIZE) & \ 202 M_TLS_KEYCTX_TX_WR_RXCK_SIZE) 203 204 #define S_TLS_KEYCTX_TX_WR_RXMK_SIZE 0 205 #define M_TLS_KEYCTX_TX_WR_RXMK_SIZE 0x7 206 #define V_TLS_KEYCTX_TX_WR_RXMK_SIZE(x) \ 207 ((x) << S_TLS_KEYCTX_TX_WR_RXMK_SIZE) 208 #define G_TLS_KEYCTX_TX_WR_RXMK_SIZE(x) \ 209 (((x) >> S_TLS_KEYCTX_TX_WR_RXMK_SIZE) & \ 210 M_TLS_KEYCTX_TX_WR_RXMK_SIZE) 211 212 #define S_TLS_KEYCTX_TX_WR_IVINSERT 55 213 #define M_TLS_KEYCTX_TX_WR_IVINSERT 0x1ffULL 214 #define V_TLS_KEYCTX_TX_WR_IVINSERT(x) \ 215 ((x) << S_TLS_KEYCTX_TX_WR_IVINSERT) 216 #define G_TLS_KEYCTX_TX_WR_IVINSERT(x) \ 217 (((x) >> S_TLS_KEYCTX_TX_WR_IVINSERT) & M_TLS_KEYCTX_TX_WR_IVINSERT) 218 219 #define S_TLS_KEYCTX_TX_WR_AADSTRTOFST 47 220 #define M_TLS_KEYCTX_TX_WR_AADSTRTOFST 0xffULL 221 #define V_TLS_KEYCTX_TX_WR_AADSTRTOFST(x) \ 222 ((x) << S_TLS_KEYCTX_TX_WR_AADSTRTOFST) 223 #define G_TLS_KEYCTX_TX_WR_AADSTRTOFST(x) \ 224 (((x) >> S_TLS_KEYCTX_TX_WR_AADSTRTOFST) & \ 225 M_TLS_KEYCTX_TX_WR_AADSTRTOFST) 226 227 #define S_TLS_KEYCTX_TX_WR_AADSTOPOFST 39 228 #define M_TLS_KEYCTX_TX_WR_AADSTOPOFST 0xffULL 229 #define V_TLS_KEYCTX_TX_WR_AADSTOPOFST(x) \ 230 ((x) << S_TLS_KEYCTX_TX_WR_AADSTOPOFST) 231 #define G_TLS_KEYCTX_TX_WR_AADSTOPOFST(x) \ 232 (((x) >> S_TLS_KEYCTX_TX_WR_AADSTOPOFST) & \ 233 M_TLS_KEYCTX_TX_WR_AADSTOPOFST) 234 235 #define S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST 30 236 #define M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST 0x1ffULL 237 #define V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x) \ 238 ((x) << S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST) 239 #define G_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(x) \ 240 (((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSRTOFST) & \ 241 M_TLS_KEYCTX_TX_WR_CIPHERSRTOFST) 242 243 #define S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST 23 244 #define M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST 0x7f 245 #define V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x) \ 246 ((x) << S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST) 247 #define G_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(x) \ 248 (((x) >> S_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST) & \ 249 M_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST) 250 251 #define S_TLS_KEYCTX_TX_WR_AUTHSRTOFST 14 252 #define M_TLS_KEYCTX_TX_WR_AUTHSRTOFST 0x1ff 253 #define V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x) \ 254 ((x) << S_TLS_KEYCTX_TX_WR_AUTHSRTOFST) 255 #define G_TLS_KEYCTX_TX_WR_AUTHSRTOFST(x) \ 256 (((x) >> S_TLS_KEYCTX_TX_WR_AUTHSRTOFST) & \ 257 M_TLS_KEYCTX_TX_WR_AUTHSRTOFST) 258 259 #define S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST 7 260 #define M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST 0x7f 261 #define V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x) \ 262 ((x) << S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST) 263 #define G_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(x) \ 264 (((x) >> S_TLS_KEYCTX_TX_WR_AUTHSTOPOFST) & \ 265 M_TLS_KEYCTX_TX_WR_AUTHSTOPOFST) 266 267 #define S_TLS_KEYCTX_TX_WR_AUTHINSRT 0 268 #define M_TLS_KEYCTX_TX_WR_AUTHINSRT 0x7f 269 #define V_TLS_KEYCTX_TX_WR_AUTHINSRT(x) \ 270 ((x) << S_TLS_KEYCTX_TX_WR_AUTHINSRT) 271 #define G_TLS_KEYCTX_TX_WR_AUTHINSRT(x) \ 272 (((x) >> S_TLS_KEYCTX_TX_WR_AUTHINSRT) & \ 273 M_TLS_KEYCTX_TX_WR_AUTHINSRT) 274 275 /* Key Context Programming Operation type */ 276 #define KEY_WRITE_RX 0x1 277 #define KEY_WRITE_TX 0x2 278 #define KEY_DELETE_RX 0x4 279 #define KEY_DELETE_TX 0x8 280 281 #define S_KEY_CLR_LOC 4 282 #define M_KEY_CLR_LOC 0xf 283 #define V_KEY_CLR_LOC(x) ((x) << S_KEY_CLR_LOC) 284 #define G_KEY_CLR_LOC(x) (((x) >> S_KEY_CLR_LOC) & M_KEY_CLR_LOC) 285 #define F_KEY_CLR_LOC V_KEY_CLR_LOC(1U) 286 287 #define S_KEY_GET_LOC 0 288 #define M_KEY_GET_LOC 0xf 289 #define V_KEY_GET_LOC(x) ((x) << S_KEY_GET_LOC) 290 #define G_KEY_GET_LOC(x) (((x) >> S_KEY_GET_LOC) & M_KEY_GET_LOC) 291 292 /* 293 * Generate the initial GMAC hash state for a AES-GCM key. 294 * 295 * Borrowed from AES_GMAC_Setkey(). 296 */ 297 void 298 t4_init_gmac_hash(const char *key, int klen, char *ghash) 299 { 300 static char zeroes[GMAC_BLOCK_LEN]; 301 uint32_t keysched[4 * (RIJNDAEL_MAXNR + 1)]; 302 int rounds; 303 304 rounds = rijndaelKeySetupEnc(keysched, key, klen * 8); 305 rijndaelEncrypt(keysched, rounds, zeroes, ghash); 306 explicit_bzero(keysched, sizeof(keysched)); 307 } 308 309 /* Copy out the partial hash state from a software hash implementation. */ 310 void 311 t4_copy_partial_hash(int alg, union authctx *auth_ctx, void *dst) 312 { 313 uint32_t *u32; 314 uint64_t *u64; 315 u_int i; 316 317 u32 = (uint32_t *)dst; 318 u64 = (uint64_t *)dst; 319 switch (alg) { 320 case CRYPTO_SHA1: 321 case CRYPTO_SHA1_HMAC: 322 for (i = 0; i < SHA1_HASH_LEN / 4; i++) 323 u32[i] = htobe32(auth_ctx->sha1ctx.h.b32[i]); 324 break; 325 case CRYPTO_SHA2_224: 326 case CRYPTO_SHA2_224_HMAC: 327 for (i = 0; i < SHA2_256_HASH_LEN / 4; i++) 328 u32[i] = htobe32(auth_ctx->sha224ctx.state[i]); 329 break; 330 case CRYPTO_SHA2_256: 331 case CRYPTO_SHA2_256_HMAC: 332 for (i = 0; i < SHA2_256_HASH_LEN / 4; i++) 333 u32[i] = htobe32(auth_ctx->sha256ctx.state[i]); 334 break; 335 case CRYPTO_SHA2_384: 336 case CRYPTO_SHA2_384_HMAC: 337 for (i = 0; i < SHA2_512_HASH_LEN / 8; i++) 338 u64[i] = htobe64(auth_ctx->sha384ctx.state[i]); 339 break; 340 case CRYPTO_SHA2_512: 341 case CRYPTO_SHA2_512_HMAC: 342 for (i = 0; i < SHA2_512_HASH_LEN / 8; i++) 343 u64[i] = htobe64(auth_ctx->sha512ctx.state[i]); 344 break; 345 } 346 } 347 348 void 349 t4_init_hmac_digest(const struct auth_hash *axf, u_int partial_digest_len, 350 const char *key, int klen, char *dst) 351 { 352 union authctx auth_ctx; 353 354 hmac_init_ipad(axf, key, klen, &auth_ctx); 355 t4_copy_partial_hash(axf->type, &auth_ctx, dst); 356 357 dst += roundup2(partial_digest_len, 16); 358 359 hmac_init_opad(axf, key, klen, &auth_ctx); 360 t4_copy_partial_hash(axf->type, &auth_ctx, dst); 361 362 explicit_bzero(&auth_ctx, sizeof(auth_ctx)); 363 } 364 365 /* 366 * Borrowed from cesa_prep_aes_key(). 367 * 368 * NB: The crypto engine wants the words in the decryption key in reverse 369 * order. 370 */ 371 void 372 t4_aes_getdeckey(void *dec_key, const void *enc_key, unsigned int kbits) 373 { 374 uint32_t ek[4 * (RIJNDAEL_MAXNR + 1)]; 375 uint32_t *dkey; 376 int i; 377 378 rijndaelKeySetupEnc(ek, enc_key, kbits); 379 dkey = dec_key; 380 dkey += (kbits / 8) / 4; 381 382 switch (kbits) { 383 case 128: 384 for (i = 0; i < 4; i++) 385 *--dkey = htobe32(ek[4 * 10 + i]); 386 break; 387 case 192: 388 for (i = 0; i < 2; i++) 389 *--dkey = htobe32(ek[4 * 11 + 2 + i]); 390 for (i = 0; i < 4; i++) 391 *--dkey = htobe32(ek[4 * 12 + i]); 392 break; 393 case 256: 394 for (i = 0; i < 4; i++) 395 *--dkey = htobe32(ek[4 * 13 + i]); 396 for (i = 0; i < 4; i++) 397 *--dkey = htobe32(ek[4 * 14 + i]); 398 break; 399 } 400 MPASS(dkey == dec_key); 401 explicit_bzero(ek, sizeof(ek)); 402 } 403 404 #ifdef KERN_TLS 405 /* 406 * - keyid management 407 * - request to program key? 408 */ 409 u_int 410 t4_tls_key_info_size(const struct ktls_session *tls) 411 { 412 u_int key_info_size, mac_key_size; 413 414 key_info_size = sizeof(struct tx_keyctx_hdr) + 415 tls->params.cipher_key_len; 416 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) { 417 key_info_size += GMAC_BLOCK_LEN; 418 } else { 419 switch (tls->params.auth_algorithm) { 420 case CRYPTO_SHA1_HMAC: 421 mac_key_size = SHA1_HASH_LEN; 422 break; 423 case CRYPTO_SHA2_256_HMAC: 424 mac_key_size = SHA2_256_HASH_LEN; 425 break; 426 case CRYPTO_SHA2_384_HMAC: 427 mac_key_size = SHA2_512_HASH_LEN; 428 break; 429 default: 430 __assert_unreachable(); 431 } 432 key_info_size += roundup2(mac_key_size, 16) * 2; 433 } 434 return (key_info_size); 435 } 436 437 int 438 t4_tls_proto_ver(const struct ktls_session *tls) 439 { 440 switch (tls->params.tls_vminor) { 441 case TLS_MINOR_VER_ONE: 442 return (SCMD_PROTO_VERSION_TLS_1_1); 443 case TLS_MINOR_VER_TWO: 444 return (SCMD_PROTO_VERSION_TLS_1_2); 445 case TLS_MINOR_VER_THREE: 446 return (SCMD_PROTO_VERSION_TLS_1_3); 447 default: 448 __assert_unreachable(); 449 } 450 } 451 452 int 453 t4_tls_cipher_mode(const struct ktls_session *tls) 454 { 455 switch (tls->params.cipher_algorithm) { 456 case CRYPTO_AES_CBC: 457 return (SCMD_CIPH_MODE_AES_CBC); 458 case CRYPTO_AES_NIST_GCM_16: 459 return (SCMD_CIPH_MODE_AES_GCM); 460 default: 461 return (SCMD_CIPH_MODE_NOP); 462 } 463 } 464 465 int 466 t4_tls_auth_mode(const struct ktls_session *tls) 467 { 468 switch (tls->params.cipher_algorithm) { 469 case CRYPTO_AES_CBC: 470 switch (tls->params.auth_algorithm) { 471 case CRYPTO_SHA1_HMAC: 472 return (SCMD_AUTH_MODE_SHA1); 473 case CRYPTO_SHA2_256_HMAC: 474 return (SCMD_AUTH_MODE_SHA256); 475 case CRYPTO_SHA2_384_HMAC: 476 return (SCMD_AUTH_MODE_SHA512_384); 477 default: 478 return (SCMD_AUTH_MODE_NOP); 479 } 480 case CRYPTO_AES_NIST_GCM_16: 481 return (SCMD_AUTH_MODE_GHASH); 482 default: 483 return (SCMD_AUTH_MODE_NOP); 484 } 485 } 486 487 int 488 t4_tls_hmac_ctrl(const struct ktls_session *tls) 489 { 490 switch (tls->params.cipher_algorithm) { 491 case CRYPTO_AES_CBC: 492 return (SCMD_HMAC_CTRL_NO_TRUNC); 493 case CRYPTO_AES_NIST_GCM_16: 494 return (SCMD_HMAC_CTRL_NOP); 495 default: 496 return (SCMD_HMAC_CTRL_NOP); 497 } 498 } 499 500 static int 501 tls_seqnum_ctrl(const struct ktls_session *tls) 502 { 503 switch (tls->params.tls_vminor) { 504 case TLS_MINOR_VER_THREE: 505 return (0); 506 default: 507 return (3); 508 } 509 } 510 511 static int 512 tls_cipher_key_size(const struct ktls_session *tls) 513 { 514 switch (tls->params.cipher_key_len) { 515 case 128 / 8: 516 return (CHCR_KEYCTX_CIPHER_KEY_SIZE_128); 517 case 192 / 8: 518 return (CHCR_KEYCTX_CIPHER_KEY_SIZE_192); 519 case 256 / 8: 520 return (CHCR_KEYCTX_CIPHER_KEY_SIZE_256); 521 default: 522 __assert_unreachable(); 523 } 524 } 525 526 static int 527 tls_mac_key_size(const struct ktls_session *tls) 528 { 529 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) 530 return (CHCR_KEYCTX_MAC_KEY_SIZE_512); 531 else { 532 switch (tls->params.auth_algorithm) { 533 case CRYPTO_SHA1_HMAC: 534 return (CHCR_KEYCTX_MAC_KEY_SIZE_160); 535 case CRYPTO_SHA2_256_HMAC: 536 return (CHCR_KEYCTX_MAC_KEY_SIZE_256); 537 case CRYPTO_SHA2_384_HMAC: 538 return (CHCR_KEYCTX_MAC_KEY_SIZE_512); 539 default: 540 __assert_unreachable(); 541 } 542 } 543 } 544 545 void 546 t4_tls_key_ctx(const struct ktls_session *tls, int direction, 547 struct tls_keyctx *kctx) 548 { 549 const struct auth_hash *axf; 550 u_int mac_key_size; 551 char *hash; 552 553 /* Key context header. */ 554 if (direction == KTLS_TX) { 555 kctx->u.txhdr.ctxlen = t4_tls_key_info_size(tls) / 16; 556 kctx->u.txhdr.dualck_to_txvalid = 557 V_TLS_KEYCTX_TX_WR_SALT_PRESENT(1) | 558 V_TLS_KEYCTX_TX_WR_TXCK_SIZE(tls_cipher_key_size(tls)) | 559 V_TLS_KEYCTX_TX_WR_TXMK_SIZE(tls_mac_key_size(tls)) | 560 V_TLS_KEYCTX_TX_WR_TXVALID(1); 561 if (tls->params.cipher_algorithm == CRYPTO_AES_CBC) 562 kctx->u.txhdr.dualck_to_txvalid |= 563 V_TLS_KEYCTX_TX_WR_TXOPAD_PRESENT(1); 564 kctx->u.txhdr.dualck_to_txvalid = 565 htobe16(kctx->u.txhdr.dualck_to_txvalid); 566 } else { 567 kctx->u.rxhdr.flitcnt_hmacctrl = 568 V_TLS_KEYCTX_TX_WR_FLITCNT(t4_tls_key_info_size(tls) / 16) | 569 V_TLS_KEYCTX_TX_WR_HMACCTRL(t4_tls_hmac_ctrl(tls)); 570 571 kctx->u.rxhdr.protover_ciphmode = 572 V_TLS_KEYCTX_TX_WR_PROTOVER(t4_tls_proto_ver(tls)) | 573 V_TLS_KEYCTX_TX_WR_CIPHMODE(t4_tls_cipher_mode(tls)); 574 575 kctx->u.rxhdr.authmode_to_rxvalid = 576 V_TLS_KEYCTX_TX_WR_AUTHMODE(t4_tls_auth_mode(tls)) | 577 V_TLS_KEYCTX_TX_WR_SEQNUMCTRL(tls_seqnum_ctrl(tls)) | 578 V_TLS_KEYCTX_TX_WR_RXVALID(1); 579 580 kctx->u.rxhdr.ivpresent_to_rxmk_size = 581 V_TLS_KEYCTX_TX_WR_IVPRESENT(0) | 582 V_TLS_KEYCTX_TX_WR_RXCK_SIZE(tls_cipher_key_size(tls)) | 583 V_TLS_KEYCTX_TX_WR_RXMK_SIZE(tls_mac_key_size(tls)); 584 585 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) { 586 kctx->u.rxhdr.ivinsert_to_authinsrt = 587 htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) | 588 V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) | 589 V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) | 590 V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(14ULL) | 591 V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(16ULL) | 592 V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(14ULL) | 593 V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) | 594 V_TLS_KEYCTX_TX_WR_AUTHINSRT(16ULL)); 595 } else { 596 kctx->u.rxhdr.authmode_to_rxvalid |= 597 V_TLS_KEYCTX_TX_WR_CIPHAUTHSEQCTRL(1); 598 kctx->u.rxhdr.ivpresent_to_rxmk_size |= 599 V_TLS_KEYCTX_TX_WR_RXOPAD_PRESENT(1); 600 kctx->u.rxhdr.ivinsert_to_authinsrt = 601 htobe64(V_TLS_KEYCTX_TX_WR_IVINSERT(6ULL) | 602 V_TLS_KEYCTX_TX_WR_AADSTRTOFST(1ULL) | 603 V_TLS_KEYCTX_TX_WR_AADSTOPOFST(5ULL) | 604 V_TLS_KEYCTX_TX_WR_AUTHSRTOFST(22ULL) | 605 V_TLS_KEYCTX_TX_WR_AUTHSTOPOFST(0ULL) | 606 V_TLS_KEYCTX_TX_WR_CIPHERSRTOFST(22ULL) | 607 V_TLS_KEYCTX_TX_WR_CIPHERSTOPOFST(0ULL) | 608 V_TLS_KEYCTX_TX_WR_AUTHINSRT(0ULL)); 609 } 610 } 611 612 /* Key. */ 613 if (direction == KTLS_RX && 614 tls->params.cipher_algorithm == CRYPTO_AES_CBC) 615 t4_aes_getdeckey(kctx->keys.edkey, tls->params.cipher_key, 616 tls->params.cipher_key_len * 8); 617 else 618 memcpy(kctx->keys.edkey, tls->params.cipher_key, 619 tls->params.cipher_key_len); 620 621 /* Auth state and implicit IV (salt). */ 622 hash = kctx->keys.edkey + tls->params.cipher_key_len; 623 if (tls->params.cipher_algorithm == CRYPTO_AES_NIST_GCM_16) { 624 _Static_assert(offsetof(struct tx_keyctx_hdr, txsalt) == 625 offsetof(struct rx_keyctx_hdr, rxsalt), 626 "salt offset mismatch"); 627 memcpy(kctx->u.txhdr.txsalt, tls->params.iv, 628 tls->params.iv_len); 629 t4_init_gmac_hash(tls->params.cipher_key, 630 tls->params.cipher_key_len, hash); 631 } else { 632 switch (tls->params.auth_algorithm) { 633 case CRYPTO_SHA1_HMAC: 634 axf = &auth_hash_hmac_sha1; 635 mac_key_size = SHA1_HASH_LEN; 636 break; 637 case CRYPTO_SHA2_256_HMAC: 638 axf = &auth_hash_hmac_sha2_256; 639 mac_key_size = SHA2_256_HASH_LEN; 640 break; 641 case CRYPTO_SHA2_384_HMAC: 642 axf = &auth_hash_hmac_sha2_384; 643 mac_key_size = SHA2_512_HASH_LEN; 644 break; 645 default: 646 __assert_unreachable(); 647 } 648 t4_init_hmac_digest(axf, mac_key_size, tls->params.auth_key, 649 tls->params.auth_key_len, hash); 650 } 651 } 652 653 int 654 t4_alloc_tls_keyid(struct adapter *sc) 655 { 656 vmem_addr_t addr; 657 658 if (sc->vres.key.size == 0) 659 return (-1); 660 661 if (vmem_alloc(sc->key_map, TLS_KEY_CONTEXT_SZ, M_NOWAIT | M_FIRSTFIT, 662 &addr) != 0) 663 return (-1); 664 665 return (addr); 666 } 667 668 void 669 t4_free_tls_keyid(struct adapter *sc, int keyid) 670 { 671 vmem_free(sc->key_map, keyid, TLS_KEY_CONTEXT_SZ); 672 } 673 674 void 675 t4_write_tlskey_wr(const struct ktls_session *tls, int direction, int tid, 676 int flags, int keyid, struct tls_key_req *kwr) 677 { 678 kwr->wr_hi = htobe32(V_FW_WR_OP(FW_ULPTX_WR) | F_FW_WR_ATOMIC | flags); 679 kwr->wr_mid = htobe32(V_FW_WR_LEN16(DIV_ROUND_UP(TLS_KEY_WR_SZ, 16)) | 680 V_FW_WR_FLOWID(tid)); 681 kwr->protocol = t4_tls_proto_ver(tls); 682 kwr->mfs = htobe16(tls->params.max_frame_len); 683 kwr->reneg_to_write_rx = V_KEY_GET_LOC(direction == KTLS_TX ? 684 KEY_WRITE_TX : KEY_WRITE_RX); 685 686 /* We don't need to use V_T7_ULP_MEMIO_DATA_LEN in this routine. */ 687 _Static_assert(V_T7_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5) == 688 V_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5), "datalen mismatch"); 689 690 /* master command */ 691 kwr->cmd = htobe32(V_ULPTX_CMD(ULP_TX_MEM_WRITE) | 692 V_T5_ULP_MEMIO_ORDER(1) | V_T5_ULP_MEMIO_IMM(1)); 693 kwr->dlen = htobe32(V_ULP_MEMIO_DATA_LEN(TLS_KEY_CONTEXT_SZ >> 5)); 694 kwr->len16 = htobe32((tid << 8) | 695 DIV_ROUND_UP(TLS_KEY_WR_SZ - sizeof(struct work_request_hdr), 16)); 696 kwr->kaddr = htobe32(V_ULP_MEMIO_ADDR(keyid >> 5)); 697 698 /* sub command */ 699 kwr->sc_more = htobe32(V_ULPTX_CMD(ULP_TX_SC_IMM)); 700 kwr->sc_len = htobe32(TLS_KEY_CONTEXT_SZ); 701 } 702 #endif 703