/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 */
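/*
 * EVP cipher implementations for AES: generic C code plus, where the
 * platform supports it, AES-NI, SPARC T4, ARMv8 and POWER8 assembler
 * paths for the ECB, CBC, CFB, OFB, CTR, GCM, XTS, CCM and key-wrap
 * modes defined below.
 */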
#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
# include <openssl/crypto.h>
# include <openssl/evp.h>
# include <openssl/err.h>
# include <string.h>
# include <assert.h>
# include <openssl/aes.h>
# include "evp_locl.h"
# include "modes_lcl.h"
# include <openssl/rand.h>

# undef EVP_CIPH_FLAG_FIPS
# define EVP_CIPH_FLAG_FIPS 0

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

# define MAXBITCHUNK    ((size_t)1 << (sizeof(size_t) * 8 - 4))

# ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
# endif
# ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
# endif
# ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
# endif
# ifdef AES_XTS_ASM
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
# endif
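/*
 * The platform sections below define *_CAPABLE macros, evaluated at run
 * time against the OPENSSL_*cap_P capability vectors, that decide which
 * of the implementations declared here the init functions actually
 * install.
 */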
# if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
#  include "ppc_arch.h"
#  ifdef VPAES_ASM
#   define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
#  endif
#  define HWAES_CAPABLE  (OPENSSL_ppccap_P & PPC_CRYPTO207)
#  define HWAES_set_encrypt_key aes_p8_set_encrypt_key
#  define HWAES_set_decrypt_key aes_p8_set_decrypt_key
#  define HWAES_encrypt aes_p8_encrypt
#  define HWAES_decrypt aes_p8_decrypt
#  define HWAES_cbc_encrypt aes_p8_cbc_encrypt
#  define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# endif

# if defined(AES_ASM) && !defined(I386_ONLY) && (     \
        ((defined(__i386)       || defined(__i386__)    || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
        defined(__x86_64)       || defined(__x86_64__)  || \
        defined(_M_AMD64)       || defined(_M_X64)      || \
        defined(__INTEL__)                                )

extern unsigned int OPENSSL_ia32cap_P[];

#  ifdef VPAES_ASM
#   define VPAES_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
#  ifdef BSAES_ASM
#   define BSAES_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
/*
 * AES-NI section
 */
#  define AESNI_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(57-32)))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

#  if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#   define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#   define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
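/*
 * The stitched AES-GCM routines above process bulk data and return the
 * number of bytes handled; AES_GCM_ASM below checks that both the CTR
 * and GHASH function pointers are the matching AVX implementations
 * before they may be used.
 */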
#   define AES_GCM_ASM(gctx)    (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
#   define AES_GCM_ASM2(gctx)   (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                 gctx->gcm.ghash==gcm_ghash_avx)
#   undef AES_GCM_ASM2          /* minor size optimization */
#  endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = ctx->cipher->block_size;

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

    return 1;
}

#  define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

#  define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

#  define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
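/*
 * The aliases above need no AES-NI-specific bodies: the generic mode
 * implementations they map to operate through the block/stream function
 * pointers that aesni_init_key() has already pointed at the AES-NI
 * routines.
 */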
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set use the IV, otherwise copy it */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

#  define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + ctx->key_len / 2,
                              ctx->key_len * 4, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

#  define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
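/*
 * Note the asymmetry in the XTS schedules set up above: ks1 follows the
 * cipher direction, while ks2 only ever encrypts the tweak, so it is
 * always an encrypt schedule.
 */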
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

#  define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

#  define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

#  define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

#  include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

#  define SPARC_AES_CAPABLE       (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in the inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in the inner loops or on the subroutines' entries. The former is
 * hardly acceptable, while the latter means a code size increase
 * comparable to that of multiple key-length-specific subroutines, so why
 * fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    bits = ctx->key_len * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, ctx->cipher_data);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, ctx->cipher_data);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

#  define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

#  define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

#  define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = ctx->key_len * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set use the IV, otherwise copy it */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

#  define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;

    if (key) {
        int bits = ctx->key_len * 4;
        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
#  if 0                         /* not yet */
            case 192:
                xctx->stream = aes192_t4_xts_encrypt;
                break;
#  endif
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
#  if 0                         /* not yet */
            case 192:
                xctx->stream = aes192_t4_xts_decrypt;
                break;
#  endif
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + ctx->key_len / 2,
                               ctx->key_len * 4, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

#  define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = ctx->key_len * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
#  if 0                         /* not yet */
        switch (bits) {
        case 128:
            cctx->str = enc ? (ccm128_f) aes128_t4_ccm64_encrypt :
                (ccm128_f) aes128_t4_ccm64_decrypt;
            break;
        case 192:
            cctx->str = enc ? (ccm128_f) aes192_t4_ccm64_encrypt :
                (ccm128_f) aes192_t4_ccm64_decrypt;
            break;
        case 256:
            cctx->str = enc ? (ccm128_f) aes256_t4_ccm64_encrypt :
                (ccm128_f) aes256_t4_ccm64_decrypt;
            break;
        default:
            return 0;
        }
#  else
        cctx->str = NULL;
#  endif
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

#  define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

#  define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#  define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# else

#  define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#  define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# endif

# if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
#  include "arm_arch.h"
#  if __ARM_MAX_ARCH__>=7
#   if defined(BSAES_ASM)
#    define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
#   endif
#   define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
#   define HWAES_set_encrypt_key aes_v8_set_encrypt_key
#   define HWAES_set_decrypt_key aes_v8_set_decrypt_key
#   define HWAES_encrypt aes_v8_encrypt
#   define HWAES_decrypt aes_v8_decrypt
#   define HWAES_cbc_encrypt aes_v8_cbc_encrypt
#   define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
#  endif
# endif
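/*
 * The HWAES_* names are bound above to the PPC (aes_p8_*) or ARMv8
 * (aes_v8_*) assembler routines; the prototypes below give those
 * bindings a single generic calling convention.
 */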
# if defined(HWAES_CAPABLE)
int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
                          AES_KEY *key);
void HWAES_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char *ivec, const int enc);
void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
# endif

# define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
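/*
 * BLOCK_CIPHER_generic_pack() stamps out all seven generic cipher tables
 * (CBC, ECB, OFB128, CFB128, CFB1, CFB8 and CTR) for one key length in a
 * single invocation.
 */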
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc)
# ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
#  ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
#  endif
        } else
# endif
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        } else
# ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
#  ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
#  endif
#  ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
#  endif
            (void)0;            /* terminate potentially open 'else' */
    } else
# endif
# ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
    } else
# endif
# ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
# endif
    {
        ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
# ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
# endif
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
    else if (ctx->encrypt)
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);

    return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = ctx->cipher->block_size;
    size_t i;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          ctx->iv, &ctx->num, dat->block);
    return 1;
}

static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          ctx->iv, &ctx->num, ctx->encrypt, dat->block);
    return 1;
}

static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            ctx->iv, &ctx->num, ctx->encrypt, dat->block);
    return 1;
}
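/*
 * aes_cfb1_cipher() below works in bits: unless EVP_CIPH_FLAG_LENGTH_BITS
 * is set, the byte count is multiplied by 8, and the data is processed in
 * MAXBITCHUNK-sized chunks so that len * 8 cannot overflow a size_t.
 */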
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (ctx->flags & EVP_CIPH_FLAG_LENGTH_BITS) {
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        len -= MAXBITCHUNK;
        /* advance past the chunk just processed */
        in += MAXBITCHUNK;
        out += MAXBITCHUNK;
    }
    if (len)
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);

    return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = ctx->num;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    ctx->iv, ctx->buf, &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              ctx->iv, ctx->buf, &num, dat->block);
    ctx->num = (size_t)num;
    return 1;
}

BLOCK_CIPHER_generic_pack(NID_aes, 128, EVP_CIPH_FLAG_FIPS)
BLOCK_CIPHER_generic_pack(NID_aes, 192, EVP_CIPH_FLAG_FIPS)
BLOCK_CIPHER_generic_pack(NID_aes, 256, EVP_CIPH_FLAG_FIPS)

static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;
    if (gctx == NULL)
        return 0;
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != c->iv)
        OPENSSL_free(gctx->iv);
    return 1;
}

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
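/*
 * GCM IV handling for TLS (cf. RFC 5288): EVP_CTRL_GCM_SET_IV_FIXED
 * installs the fixed (at least 4-byte) part of the nonce, and
 * EVP_CTRL_GCM_IV_GEN hands out the invocation field, advancing it with
 * ctr64_inc() so that every record gets a fresh IV.
 */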
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (!gctx->iv)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_GCM_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_GCM_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at
         * least 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (!gctx_out->iv)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
#  ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
#  else
                gctx->ctr = NULL;
#  endif
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
# ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
# else
            gctx->ctr = NULL;
# endif
        } while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set use the IV, otherwise copy it */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */
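/*
 * Record layout assumed by aes_gcm_tls_cipher(), which operates in place:
 *
 *   | explicit IV (EVP_GCM_TLS_EXPLICIT_IV_LEN) | payload | tag (EVP_GCM_TLS_TAG_LEN) |
 */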
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
# if defined(AES_GCM_ASM)
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
# if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
# if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
# if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
# endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
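/*
 * aes_gcm_cipher() below follows the EVP AEAD convention: out == NULL
 * feeds AAD, in == NULL finalizes (writing the tag when encrypting,
 * checking it when decrypting), anything else processes payload.
 */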
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
# if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
# if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
# endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}

# define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)

static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = c->cipher_data;
    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = out->cipher_data;
        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator that both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;

    if (key)
        do {
# ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
            xctx->stream = NULL;
# endif
            /* key_len is two AES keys */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
                } else {
                    HWAES_set_decrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
                }

                HWAES_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, ctx->key_len * 4,
                                          &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + ctx->key_len / 2,
                                ctx->key_len * 4, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!xctx->xts.key1 || !xctx->xts.key2)
        return 0;
    if (!out || !in || len < AES_BLOCK_SIZE)
        return 0;
    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2, ctx->iv);
    else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
                                   ctx->encrypt))
        return 0;
    return 1;
}

# define aes_xts_cleanup NULL

# define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)

static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_CCM_SET_IVLEN:
        arg = 15 - arg;
        /* fall through */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_CCM_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (c->encrypt && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(c->buf, ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_CCM_GET_TAG:
        if (!c->encrypt || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
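/*
 * CCM parameters (RFC 3610): M is the tag length, L the size of the
 * length field; the two trade off against each other, since the nonce
 * used below is the remaining 15 - L bytes of the block.
 */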
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
# endif
            AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->iv_set && !cctx->key_set)
        return -1;
    if (!ctx->encrypt && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If we have AAD we need a message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If the length has not been set yet, do it now */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (ctx->encrypt) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, ctx->buf, cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }

}

# define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
#endif

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    /* Indicates if IV has been set */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key) {
        if (ctx->encrypt)
            AES_set_encrypt_key(key, ctx->key_len * 8, &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, ctx->key_len * 8, &wctx->ks.ks);
        if (!iv)
            wctx->iv = NULL;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 8);
        wctx->iv = ctx->iv;
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = ctx->cipher_data;
    size_t rv;
    if (!in)
        return 0;
    if (inlen % 8)
        return -1;
    if (ctx->encrypt && inlen < 8)
        return -1;
    if (!ctx->encrypt && inlen < 16)
        return -1;
    if (!out) {
        if (ctx->encrypt)
            return inlen + 8;
        else
            return inlen - 8;
    }
    if (ctx->encrypt)
        rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv, out, in, inlen,
                             (block128_f) AES_encrypt);
    else
        rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv, out, in, inlen,
                               (block128_f) AES_decrypt);
    return rv ? (int)rv : -1;
}

#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}