1 /* 2 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved. 3 * 4 * Licensed under the OpenSSL license (the "License"). You may not use 5 * this file except in compliance with the License. You can obtain a copy 6 * in the file LICENSE in the source distribution or at 7 * https://www.openssl.org/source/license.html 8 */ 9 10 #include <openssl/opensslconf.h> 11 #include <openssl/crypto.h> 12 #include <openssl/evp.h> 13 #include <openssl/err.h> 14 #include <string.h> 15 #include <assert.h> 16 #include <openssl/aes.h> 17 #include "crypto/evp.h" 18 #include "modes_local.h" 19 #include <openssl/rand.h> 20 #include "evp_local.h" 21 22 typedef struct { 23 union { 24 double align; 25 AES_KEY ks; 26 } ks; 27 block128_f block; 28 union { 29 cbc128_f cbc; 30 ctr128_f ctr; 31 } stream; 32 } EVP_AES_KEY; 33 34 typedef struct { 35 union { 36 double align; 37 AES_KEY ks; 38 } ks; /* AES key schedule to use */ 39 int key_set; /* Set if key initialised */ 40 int iv_set; /* Set if an iv is set */ 41 GCM128_CONTEXT gcm; 42 unsigned char *iv; /* Temporary IV store */ 43 int ivlen; /* IV length */ 44 int taglen; 45 int iv_gen; /* It is OK to generate IVs */ 46 int tls_aad_len; /* TLS AAD length */ 47 ctr128_f ctr; 48 } EVP_AES_GCM_CTX; 49 50 typedef struct { 51 union { 52 double align; 53 AES_KEY ks; 54 } ks1, ks2; /* AES key schedules to use */ 55 XTS128_CONTEXT xts; 56 void (*stream) (const unsigned char *in, 57 unsigned char *out, size_t length, 58 const AES_KEY *key1, const AES_KEY *key2, 59 const unsigned char iv[16]); 60 } EVP_AES_XTS_CTX; 61 62 typedef struct { 63 union { 64 double align; 65 AES_KEY ks; 66 } ks; /* AES key schedule to use */ 67 int key_set; /* Set if key initialised */ 68 int iv_set; /* Set if an iv is set */ 69 int tag_set; /* Set if tag is valid */ 70 int len_set; /* Set if message length set */ 71 int L, M; /* L and M parameters from RFC3610 */ 72 int tls_aad_len; /* TLS AAD length */ 73 CCM128_CONTEXT ccm; 74 ccm128_f str; 75 } EVP_AES_CCM_CTX; 76 77 #ifndef OPENSSL_NO_OCB 78 typedef struct { 79 union { 80 double align; 81 AES_KEY ks; 82 } ksenc; /* AES key schedule to use for encryption */ 83 union { 84 double align; 85 AES_KEY ks; 86 } ksdec; /* AES key schedule to use for decryption */ 87 int key_set; /* Set if key initialised */ 88 int iv_set; /* Set if an iv is set */ 89 OCB128_CONTEXT ocb; 90 unsigned char *iv; /* Temporary IV store */ 91 unsigned char tag[16]; 92 unsigned char data_buf[16]; /* Store partial data blocks */ 93 unsigned char aad_buf[16]; /* Store partial AAD blocks */ 94 int data_buf_len; 95 int aad_buf_len; 96 int ivlen; /* IV length */ 97 int taglen; 98 } EVP_AES_OCB_CTX; 99 #endif 100 101 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4)) 102 103 #ifdef VPAES_ASM 104 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits, 105 AES_KEY *key); 106 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits, 107 AES_KEY *key); 108 109 void vpaes_encrypt(const unsigned char *in, unsigned char *out, 110 const AES_KEY *key); 111 void vpaes_decrypt(const unsigned char *in, unsigned char *out, 112 const AES_KEY *key); 113 114 void vpaes_cbc_encrypt(const unsigned char *in, 115 unsigned char *out, 116 size_t length, 117 const AES_KEY *key, unsigned char *ivec, int enc); 118 #endif 119 #ifdef BSAES_ASM 120 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out, 121 size_t length, const AES_KEY *key, 122 unsigned char ivec[16], int enc); 123 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out, 124 size_t len, const AES_KEY *key, 125 const unsigned char ivec[16]); 126 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out, 127 size_t len, const AES_KEY *key1, 128 const AES_KEY *key2, const unsigned char iv[16]); 129 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out, 130 size_t len, const AES_KEY *key1, 131 const AES_KEY *key2, const unsigned char iv[16]); 132 #endif 133 #ifdef AES_CTR_ASM 134 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out, 135 size_t blocks, const AES_KEY *key, 136 const unsigned char ivec[AES_BLOCK_SIZE]); 137 #endif 138 #ifdef AES_XTS_ASM 139 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len, 140 const AES_KEY *key1, const AES_KEY *key2, 141 const unsigned char iv[16]); 142 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len, 143 const AES_KEY *key1, const AES_KEY *key2, 144 const unsigned char iv[16]); 145 #endif 146 147 /* increment counter (64-bit int) by 1 */ 148 static void ctr64_inc(unsigned char *counter) 149 { 150 int n = 8; 151 unsigned char c; 152 153 do { 154 --n; 155 c = counter[n]; 156 ++c; 157 counter[n] = c; 158 if (c) 159 return; 160 } while (n); 161 } 162 163 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC)) 164 # include "ppc_arch.h" 165 # ifdef VPAES_ASM 166 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC) 167 # endif 168 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207) 169 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key 170 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key 171 # define HWAES_encrypt aes_p8_encrypt 172 # define HWAES_decrypt aes_p8_decrypt 173 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt 174 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks 175 # define HWAES_xts_encrypt aes_p8_xts_encrypt 176 # define HWAES_xts_decrypt aes_p8_xts_decrypt 177 #endif 178 179 #if defined(OPENSSL_CPUID_OBJ) && ( \ 180 ((defined(__i386) || defined(__i386__) || \ 181 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \ 182 defined(__x86_64) || defined(__x86_64__) || \ 183 defined(_M_AMD64) || defined(_M_X64) ) 184 185 extern unsigned int OPENSSL_ia32cap_P[]; 186 187 # ifdef VPAES_ASM 188 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32))) 189 # endif 190 # ifdef BSAES_ASM 191 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32))) 192 # endif 193 /* 194 * AES-NI section 195 */ 196 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32))) 197 198 int aesni_set_encrypt_key(const unsigned char *userKey, int bits, 199 AES_KEY *key); 200 int aesni_set_decrypt_key(const unsigned char *userKey, int bits, 201 AES_KEY *key); 202 203 void aesni_encrypt(const unsigned char *in, unsigned char *out, 204 const AES_KEY *key); 205 void aesni_decrypt(const unsigned char *in, unsigned char *out, 206 const AES_KEY *key); 207 208 void aesni_ecb_encrypt(const unsigned char *in, 209 unsigned char *out, 210 size_t length, const AES_KEY *key, int enc); 211 void aesni_cbc_encrypt(const unsigned char *in, 212 unsigned char *out, 213 size_t length, 214 const AES_KEY *key, unsigned char *ivec, int enc); 215 216 void aesni_ctr32_encrypt_blocks(const unsigned char *in, 217 unsigned char *out, 218 size_t blocks, 219 const void *key, const unsigned char *ivec); 220 221 void aesni_xts_encrypt(const unsigned char *in, 222 unsigned char *out, 223 size_t length, 224 const AES_KEY *key1, const AES_KEY *key2, 225 const unsigned char iv[16]); 226 227 void aesni_xts_decrypt(const unsigned char *in, 228 unsigned char *out, 229 size_t length, 230 const AES_KEY *key1, const AES_KEY *key2, 231 const unsigned char iv[16]); 232 233 void aesni_ccm64_encrypt_blocks(const unsigned char *in, 234 unsigned char *out, 235 size_t blocks, 236 const void *key, 237 const unsigned char ivec[16], 238 unsigned char cmac[16]); 239 240 void aesni_ccm64_decrypt_blocks(const unsigned char *in, 241 unsigned char *out, 242 size_t blocks, 243 const void *key, 244 const unsigned char ivec[16], 245 unsigned char cmac[16]); 246 247 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64) 248 size_t aesni_gcm_encrypt(const unsigned char *in, 249 unsigned char *out, 250 size_t len, 251 const void *key, unsigned char ivec[16], u64 *Xi); 252 # define AES_gcm_encrypt aesni_gcm_encrypt 253 size_t aesni_gcm_decrypt(const unsigned char *in, 254 unsigned char *out, 255 size_t len, 256 const void *key, unsigned char ivec[16], u64 *Xi); 257 # define AES_gcm_decrypt aesni_gcm_decrypt 258 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in, 259 size_t len); 260 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \ 261 gctx->gcm.ghash==gcm_ghash_avx) 262 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \ 263 gctx->gcm.ghash==gcm_ghash_avx) 264 # undef AES_GCM_ASM2 /* minor size optimization */ 265 # endif 266 267 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 268 const unsigned char *iv, int enc) 269 { 270 int ret, mode; 271 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 272 273 mode = EVP_CIPHER_CTX_mode(ctx); 274 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 275 && !enc) { 276 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 277 &dat->ks.ks); 278 dat->block = (block128_f) aesni_decrypt; 279 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 280 (cbc128_f) aesni_cbc_encrypt : NULL; 281 } else { 282 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 283 &dat->ks.ks); 284 dat->block = (block128_f) aesni_encrypt; 285 if (mode == EVP_CIPH_CBC_MODE) 286 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt; 287 else if (mode == EVP_CIPH_CTR_MODE) 288 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks; 289 else 290 dat->stream.cbc = NULL; 291 } 292 293 if (ret < 0) { 294 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 295 return 0; 296 } 297 298 return 1; 299 } 300 301 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 302 const unsigned char *in, size_t len) 303 { 304 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks, 305 EVP_CIPHER_CTX_iv_noconst(ctx), 306 EVP_CIPHER_CTX_encrypting(ctx)); 307 308 return 1; 309 } 310 311 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 312 const unsigned char *in, size_t len) 313 { 314 size_t bl = EVP_CIPHER_CTX_block_size(ctx); 315 316 if (len < bl) 317 return 1; 318 319 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks, 320 EVP_CIPHER_CTX_encrypting(ctx)); 321 322 return 1; 323 } 324 325 # define aesni_ofb_cipher aes_ofb_cipher 326 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 327 const unsigned char *in, size_t len); 328 329 # define aesni_cfb_cipher aes_cfb_cipher 330 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 331 const unsigned char *in, size_t len); 332 333 # define aesni_cfb8_cipher aes_cfb8_cipher 334 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 335 const unsigned char *in, size_t len); 336 337 # define aesni_cfb1_cipher aes_cfb1_cipher 338 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 339 const unsigned char *in, size_t len); 340 341 # define aesni_ctr_cipher aes_ctr_cipher 342 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 343 const unsigned char *in, size_t len); 344 345 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 346 const unsigned char *iv, int enc) 347 { 348 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 349 if (!iv && !key) 350 return 1; 351 if (key) { 352 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 353 &gctx->ks.ks); 354 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt); 355 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks; 356 /* 357 * If we have an iv can set it directly, otherwise use saved IV. 358 */ 359 if (iv == NULL && gctx->iv_set) 360 iv = gctx->iv; 361 if (iv) { 362 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 363 gctx->iv_set = 1; 364 } 365 gctx->key_set = 1; 366 } else { 367 /* If key set use IV, otherwise copy */ 368 if (gctx->key_set) 369 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 370 else 371 memcpy(gctx->iv, iv, gctx->ivlen); 372 gctx->iv_set = 1; 373 gctx->iv_gen = 0; 374 } 375 return 1; 376 } 377 378 # define aesni_gcm_cipher aes_gcm_cipher 379 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 380 const unsigned char *in, size_t len); 381 382 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 383 const unsigned char *iv, int enc) 384 { 385 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 386 387 if (!iv && !key) 388 return 1; 389 390 if (key) { 391 /* The key is two half length keys in reality */ 392 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2; 393 394 /* 395 * Verify that the two keys are different. 396 * 397 * This addresses Rogaway's vulnerability. 398 * See comment in aes_xts_init_key() below. 399 */ 400 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) { 401 EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS); 402 return 0; 403 } 404 405 /* key_len is two AES keys */ 406 if (enc) { 407 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 408 &xctx->ks1.ks); 409 xctx->xts.block1 = (block128_f) aesni_encrypt; 410 xctx->stream = aesni_xts_encrypt; 411 } else { 412 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 413 &xctx->ks1.ks); 414 xctx->xts.block1 = (block128_f) aesni_decrypt; 415 xctx->stream = aesni_xts_decrypt; 416 } 417 418 aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 419 EVP_CIPHER_CTX_key_length(ctx) * 4, 420 &xctx->ks2.ks); 421 xctx->xts.block2 = (block128_f) aesni_encrypt; 422 423 xctx->xts.key1 = &xctx->ks1; 424 } 425 426 if (iv) { 427 xctx->xts.key2 = &xctx->ks2; 428 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 429 } 430 431 return 1; 432 } 433 434 # define aesni_xts_cipher aes_xts_cipher 435 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 436 const unsigned char *in, size_t len); 437 438 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 439 const unsigned char *iv, int enc) 440 { 441 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 442 if (!iv && !key) 443 return 1; 444 if (key) { 445 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 446 &cctx->ks.ks); 447 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 448 &cctx->ks, (block128_f) aesni_encrypt); 449 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks : 450 (ccm128_f) aesni_ccm64_decrypt_blocks; 451 cctx->key_set = 1; 452 } 453 if (iv) { 454 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 455 cctx->iv_set = 1; 456 } 457 return 1; 458 } 459 460 # define aesni_ccm_cipher aes_ccm_cipher 461 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 462 const unsigned char *in, size_t len); 463 464 # ifndef OPENSSL_NO_OCB 465 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out, 466 size_t blocks, const void *key, 467 size_t start_block_num, 468 unsigned char offset_i[16], 469 const unsigned char L_[][16], 470 unsigned char checksum[16]); 471 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out, 472 size_t blocks, const void *key, 473 size_t start_block_num, 474 unsigned char offset_i[16], 475 const unsigned char L_[][16], 476 unsigned char checksum[16]); 477 478 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 479 const unsigned char *iv, int enc) 480 { 481 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 482 if (!iv && !key) 483 return 1; 484 if (key) { 485 do { 486 /* 487 * We set both the encrypt and decrypt key here because decrypt 488 * needs both. We could possibly optimise to remove setting the 489 * decrypt for an encryption operation. 490 */ 491 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 492 &octx->ksenc.ks); 493 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 494 &octx->ksdec.ks); 495 if (!CRYPTO_ocb128_init(&octx->ocb, 496 &octx->ksenc.ks, &octx->ksdec.ks, 497 (block128_f) aesni_encrypt, 498 (block128_f) aesni_decrypt, 499 enc ? aesni_ocb_encrypt 500 : aesni_ocb_decrypt)) 501 return 0; 502 } 503 while (0); 504 505 /* 506 * If we have an iv we can set it directly, otherwise use saved IV. 507 */ 508 if (iv == NULL && octx->iv_set) 509 iv = octx->iv; 510 if (iv) { 511 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen) 512 != 1) 513 return 0; 514 octx->iv_set = 1; 515 } 516 octx->key_set = 1; 517 } else { 518 /* If key set use IV, otherwise copy */ 519 if (octx->key_set) 520 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen); 521 else 522 memcpy(octx->iv, iv, octx->ivlen); 523 octx->iv_set = 1; 524 } 525 return 1; 526 } 527 528 # define aesni_ocb_cipher aes_ocb_cipher 529 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 530 const unsigned char *in, size_t len); 531 # endif /* OPENSSL_NO_OCB */ 532 533 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 534 static const EVP_CIPHER aesni_##keylen##_##mode = { \ 535 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 536 flags|EVP_CIPH_##MODE##_MODE, \ 537 aesni_init_key, \ 538 aesni_##mode##_cipher, \ 539 NULL, \ 540 sizeof(EVP_AES_KEY), \ 541 NULL,NULL,NULL,NULL }; \ 542 static const EVP_CIPHER aes_##keylen##_##mode = { \ 543 nid##_##keylen##_##nmode,blocksize, \ 544 keylen/8,ivlen, \ 545 flags|EVP_CIPH_##MODE##_MODE, \ 546 aes_init_key, \ 547 aes_##mode##_cipher, \ 548 NULL, \ 549 sizeof(EVP_AES_KEY), \ 550 NULL,NULL,NULL,NULL }; \ 551 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 552 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; } 553 554 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 555 static const EVP_CIPHER aesni_##keylen##_##mode = { \ 556 nid##_##keylen##_##mode,blocksize, \ 557 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 558 flags|EVP_CIPH_##MODE##_MODE, \ 559 aesni_##mode##_init_key, \ 560 aesni_##mode##_cipher, \ 561 aes_##mode##_cleanup, \ 562 sizeof(EVP_AES_##MODE##_CTX), \ 563 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 564 static const EVP_CIPHER aes_##keylen##_##mode = { \ 565 nid##_##keylen##_##mode,blocksize, \ 566 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 567 flags|EVP_CIPH_##MODE##_MODE, \ 568 aes_##mode##_init_key, \ 569 aes_##mode##_cipher, \ 570 aes_##mode##_cleanup, \ 571 sizeof(EVP_AES_##MODE##_CTX), \ 572 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 573 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 574 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; } 575 576 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__)) 577 578 # include "sparc_arch.h" 579 580 extern unsigned int OPENSSL_sparcv9cap_P[]; 581 582 /* 583 * Initial Fujitsu SPARC64 X support 584 */ 585 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX) 586 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key 587 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key 588 # define HWAES_encrypt aes_fx_encrypt 589 # define HWAES_decrypt aes_fx_decrypt 590 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt 591 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks 592 593 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES) 594 595 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks); 596 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks); 597 void aes_t4_encrypt(const unsigned char *in, unsigned char *out, 598 const AES_KEY *key); 599 void aes_t4_decrypt(const unsigned char *in, unsigned char *out, 600 const AES_KEY *key); 601 /* 602 * Key-length specific subroutines were chosen for following reason. 603 * Each SPARC T4 core can execute up to 8 threads which share core's 604 * resources. Loading as much key material to registers allows to 605 * minimize references to shared memory interface, as well as amount 606 * of instructions in inner loops [much needed on T4]. But then having 607 * non-key-length specific routines would require conditional branches 608 * either in inner loops or on subroutines' entries. Former is hardly 609 * acceptable, while latter means code size increase to size occupied 610 * by multiple key-length specific subroutines, so why fight? 611 */ 612 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out, 613 size_t len, const AES_KEY *key, 614 unsigned char *ivec, int /*unused*/); 615 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out, 616 size_t len, const AES_KEY *key, 617 unsigned char *ivec, int /*unused*/); 618 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out, 619 size_t len, const AES_KEY *key, 620 unsigned char *ivec, int /*unused*/); 621 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out, 622 size_t len, const AES_KEY *key, 623 unsigned char *ivec, int /*unused*/); 624 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out, 625 size_t len, const AES_KEY *key, 626 unsigned char *ivec, int /*unused*/); 627 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out, 628 size_t len, const AES_KEY *key, 629 unsigned char *ivec, int /*unused*/); 630 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out, 631 size_t blocks, const AES_KEY *key, 632 unsigned char *ivec); 633 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out, 634 size_t blocks, const AES_KEY *key, 635 unsigned char *ivec); 636 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out, 637 size_t blocks, const AES_KEY *key, 638 unsigned char *ivec); 639 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out, 640 size_t blocks, const AES_KEY *key1, 641 const AES_KEY *key2, const unsigned char *ivec); 642 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out, 643 size_t blocks, const AES_KEY *key1, 644 const AES_KEY *key2, const unsigned char *ivec); 645 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out, 646 size_t blocks, const AES_KEY *key1, 647 const AES_KEY *key2, const unsigned char *ivec); 648 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out, 649 size_t blocks, const AES_KEY *key1, 650 const AES_KEY *key2, const unsigned char *ivec); 651 652 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 653 const unsigned char *iv, int enc) 654 { 655 int ret, mode, bits; 656 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 657 658 mode = EVP_CIPHER_CTX_mode(ctx); 659 bits = EVP_CIPHER_CTX_key_length(ctx) * 8; 660 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 661 && !enc) { 662 ret = 0; 663 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks); 664 dat->block = (block128_f) aes_t4_decrypt; 665 switch (bits) { 666 case 128: 667 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 668 (cbc128_f) aes128_t4_cbc_decrypt : NULL; 669 break; 670 case 192: 671 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 672 (cbc128_f) aes192_t4_cbc_decrypt : NULL; 673 break; 674 case 256: 675 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 676 (cbc128_f) aes256_t4_cbc_decrypt : NULL; 677 break; 678 default: 679 ret = -1; 680 } 681 } else { 682 ret = 0; 683 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks); 684 dat->block = (block128_f) aes_t4_encrypt; 685 switch (bits) { 686 case 128: 687 if (mode == EVP_CIPH_CBC_MODE) 688 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt; 689 else if (mode == EVP_CIPH_CTR_MODE) 690 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt; 691 else 692 dat->stream.cbc = NULL; 693 break; 694 case 192: 695 if (mode == EVP_CIPH_CBC_MODE) 696 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt; 697 else if (mode == EVP_CIPH_CTR_MODE) 698 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt; 699 else 700 dat->stream.cbc = NULL; 701 break; 702 case 256: 703 if (mode == EVP_CIPH_CBC_MODE) 704 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt; 705 else if (mode == EVP_CIPH_CTR_MODE) 706 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt; 707 else 708 dat->stream.cbc = NULL; 709 break; 710 default: 711 ret = -1; 712 } 713 } 714 715 if (ret < 0) { 716 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 717 return 0; 718 } 719 720 return 1; 721 } 722 723 # define aes_t4_cbc_cipher aes_cbc_cipher 724 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 725 const unsigned char *in, size_t len); 726 727 # define aes_t4_ecb_cipher aes_ecb_cipher 728 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 729 const unsigned char *in, size_t len); 730 731 # define aes_t4_ofb_cipher aes_ofb_cipher 732 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 733 const unsigned char *in, size_t len); 734 735 # define aes_t4_cfb_cipher aes_cfb_cipher 736 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 737 const unsigned char *in, size_t len); 738 739 # define aes_t4_cfb8_cipher aes_cfb8_cipher 740 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 741 const unsigned char *in, size_t len); 742 743 # define aes_t4_cfb1_cipher aes_cfb1_cipher 744 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 745 const unsigned char *in, size_t len); 746 747 # define aes_t4_ctr_cipher aes_ctr_cipher 748 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 749 const unsigned char *in, size_t len); 750 751 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 752 const unsigned char *iv, int enc) 753 { 754 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 755 if (!iv && !key) 756 return 1; 757 if (key) { 758 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8; 759 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks); 760 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 761 (block128_f) aes_t4_encrypt); 762 switch (bits) { 763 case 128: 764 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt; 765 break; 766 case 192: 767 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt; 768 break; 769 case 256: 770 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt; 771 break; 772 default: 773 return 0; 774 } 775 /* 776 * If we have an iv can set it directly, otherwise use saved IV. 777 */ 778 if (iv == NULL && gctx->iv_set) 779 iv = gctx->iv; 780 if (iv) { 781 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 782 gctx->iv_set = 1; 783 } 784 gctx->key_set = 1; 785 } else { 786 /* If key set use IV, otherwise copy */ 787 if (gctx->key_set) 788 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 789 else 790 memcpy(gctx->iv, iv, gctx->ivlen); 791 gctx->iv_set = 1; 792 gctx->iv_gen = 0; 793 } 794 return 1; 795 } 796 797 # define aes_t4_gcm_cipher aes_gcm_cipher 798 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 799 const unsigned char *in, size_t len); 800 801 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 802 const unsigned char *iv, int enc) 803 { 804 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 805 806 if (!iv && !key) 807 return 1; 808 809 if (key) { 810 /* The key is two half length keys in reality */ 811 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2; 812 const int bits = bytes * 8; 813 814 /* 815 * Verify that the two keys are different. 816 * 817 * This addresses Rogaway's vulnerability. 818 * See comment in aes_xts_init_key() below. 819 */ 820 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) { 821 EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS); 822 return 0; 823 } 824 825 xctx->stream = NULL; 826 /* key_len is two AES keys */ 827 if (enc) { 828 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks); 829 xctx->xts.block1 = (block128_f) aes_t4_encrypt; 830 switch (bits) { 831 case 128: 832 xctx->stream = aes128_t4_xts_encrypt; 833 break; 834 case 256: 835 xctx->stream = aes256_t4_xts_encrypt; 836 break; 837 default: 838 return 0; 839 } 840 } else { 841 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 842 &xctx->ks1.ks); 843 xctx->xts.block1 = (block128_f) aes_t4_decrypt; 844 switch (bits) { 845 case 128: 846 xctx->stream = aes128_t4_xts_decrypt; 847 break; 848 case 256: 849 xctx->stream = aes256_t4_xts_decrypt; 850 break; 851 default: 852 return 0; 853 } 854 } 855 856 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 857 EVP_CIPHER_CTX_key_length(ctx) * 4, 858 &xctx->ks2.ks); 859 xctx->xts.block2 = (block128_f) aes_t4_encrypt; 860 861 xctx->xts.key1 = &xctx->ks1; 862 } 863 864 if (iv) { 865 xctx->xts.key2 = &xctx->ks2; 866 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 867 } 868 869 return 1; 870 } 871 872 # define aes_t4_xts_cipher aes_xts_cipher 873 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 874 const unsigned char *in, size_t len); 875 876 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 877 const unsigned char *iv, int enc) 878 { 879 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 880 if (!iv && !key) 881 return 1; 882 if (key) { 883 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8; 884 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks); 885 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 886 &cctx->ks, (block128_f) aes_t4_encrypt); 887 cctx->str = NULL; 888 cctx->key_set = 1; 889 } 890 if (iv) { 891 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 892 cctx->iv_set = 1; 893 } 894 return 1; 895 } 896 897 # define aes_t4_ccm_cipher aes_ccm_cipher 898 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 899 const unsigned char *in, size_t len); 900 901 # ifndef OPENSSL_NO_OCB 902 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 903 const unsigned char *iv, int enc) 904 { 905 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 906 if (!iv && !key) 907 return 1; 908 if (key) { 909 do { 910 /* 911 * We set both the encrypt and decrypt key here because decrypt 912 * needs both. We could possibly optimise to remove setting the 913 * decrypt for an encryption operation. 914 */ 915 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 916 &octx->ksenc.ks); 917 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 918 &octx->ksdec.ks); 919 if (!CRYPTO_ocb128_init(&octx->ocb, 920 &octx->ksenc.ks, &octx->ksdec.ks, 921 (block128_f) aes_t4_encrypt, 922 (block128_f) aes_t4_decrypt, 923 NULL)) 924 return 0; 925 } 926 while (0); 927 928 /* 929 * If we have an iv we can set it directly, otherwise use saved IV. 930 */ 931 if (iv == NULL && octx->iv_set) 932 iv = octx->iv; 933 if (iv) { 934 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen) 935 != 1) 936 return 0; 937 octx->iv_set = 1; 938 } 939 octx->key_set = 1; 940 } else { 941 /* If key set use IV, otherwise copy */ 942 if (octx->key_set) 943 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen); 944 else 945 memcpy(octx->iv, iv, octx->ivlen); 946 octx->iv_set = 1; 947 } 948 return 1; 949 } 950 951 # define aes_t4_ocb_cipher aes_ocb_cipher 952 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 953 const unsigned char *in, size_t len); 954 # endif /* OPENSSL_NO_OCB */ 955 956 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 957 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \ 958 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 959 flags|EVP_CIPH_##MODE##_MODE, \ 960 aes_t4_init_key, \ 961 aes_t4_##mode##_cipher, \ 962 NULL, \ 963 sizeof(EVP_AES_KEY), \ 964 NULL,NULL,NULL,NULL }; \ 965 static const EVP_CIPHER aes_##keylen##_##mode = { \ 966 nid##_##keylen##_##nmode,blocksize, \ 967 keylen/8,ivlen, \ 968 flags|EVP_CIPH_##MODE##_MODE, \ 969 aes_init_key, \ 970 aes_##mode##_cipher, \ 971 NULL, \ 972 sizeof(EVP_AES_KEY), \ 973 NULL,NULL,NULL,NULL }; \ 974 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 975 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; } 976 977 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 978 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \ 979 nid##_##keylen##_##mode,blocksize, \ 980 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 981 flags|EVP_CIPH_##MODE##_MODE, \ 982 aes_t4_##mode##_init_key, \ 983 aes_t4_##mode##_cipher, \ 984 aes_##mode##_cleanup, \ 985 sizeof(EVP_AES_##MODE##_CTX), \ 986 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 987 static const EVP_CIPHER aes_##keylen##_##mode = { \ 988 nid##_##keylen##_##mode,blocksize, \ 989 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 990 flags|EVP_CIPH_##MODE##_MODE, \ 991 aes_##mode##_init_key, \ 992 aes_##mode##_cipher, \ 993 aes_##mode##_cleanup, \ 994 sizeof(EVP_AES_##MODE##_CTX), \ 995 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 996 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 997 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; } 998 999 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__) 1000 /* 1001 * IBM S390X support 1002 */ 1003 # include "s390x_arch.h" 1004 1005 typedef struct { 1006 union { 1007 double align; 1008 /*- 1009 * KM-AES parameter block - begin 1010 * (see z/Architecture Principles of Operation >= SA22-7832-06) 1011 */ 1012 struct { 1013 unsigned char k[32]; 1014 } param; 1015 /* KM-AES parameter block - end */ 1016 } km; 1017 unsigned int fc; 1018 } S390X_AES_ECB_CTX; 1019 1020 typedef struct { 1021 union { 1022 double align; 1023 /*- 1024 * KMO-AES parameter block - begin 1025 * (see z/Architecture Principles of Operation >= SA22-7832-08) 1026 */ 1027 struct { 1028 unsigned char cv[16]; 1029 unsigned char k[32]; 1030 } param; 1031 /* KMO-AES parameter block - end */ 1032 } kmo; 1033 unsigned int fc; 1034 1035 int res; 1036 } S390X_AES_OFB_CTX; 1037 1038 typedef struct { 1039 union { 1040 double align; 1041 /*- 1042 * KMF-AES parameter block - begin 1043 * (see z/Architecture Principles of Operation >= SA22-7832-08) 1044 */ 1045 struct { 1046 unsigned char cv[16]; 1047 unsigned char k[32]; 1048 } param; 1049 /* KMF-AES parameter block - end */ 1050 } kmf; 1051 unsigned int fc; 1052 1053 int res; 1054 } S390X_AES_CFB_CTX; 1055 1056 typedef struct { 1057 union { 1058 double align; 1059 /*- 1060 * KMA-GCM-AES parameter block - begin 1061 * (see z/Architecture Principles of Operation >= SA22-7832-11) 1062 */ 1063 struct { 1064 unsigned char reserved[12]; 1065 union { 1066 unsigned int w; 1067 unsigned char b[4]; 1068 } cv; 1069 union { 1070 unsigned long long g[2]; 1071 unsigned char b[16]; 1072 } t; 1073 unsigned char h[16]; 1074 unsigned long long taadl; 1075 unsigned long long tpcl; 1076 union { 1077 unsigned long long g[2]; 1078 unsigned int w[4]; 1079 } j0; 1080 unsigned char k[32]; 1081 } param; 1082 /* KMA-GCM-AES parameter block - end */ 1083 } kma; 1084 unsigned int fc; 1085 int key_set; 1086 1087 unsigned char *iv; 1088 int ivlen; 1089 int iv_set; 1090 int iv_gen; 1091 1092 int taglen; 1093 1094 unsigned char ares[16]; 1095 unsigned char mres[16]; 1096 unsigned char kres[16]; 1097 int areslen; 1098 int mreslen; 1099 int kreslen; 1100 1101 int tls_aad_len; 1102 } S390X_AES_GCM_CTX; 1103 1104 typedef struct { 1105 union { 1106 double align; 1107 /*- 1108 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and 1109 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's 1110 * rounds field is used to store the function code and that the key 1111 * schedule is not stored (if aes hardware support is detected). 1112 */ 1113 struct { 1114 unsigned char pad[16]; 1115 AES_KEY k; 1116 } key; 1117 1118 struct { 1119 /*- 1120 * KMAC-AES parameter block - begin 1121 * (see z/Architecture Principles of Operation >= SA22-7832-08) 1122 */ 1123 struct { 1124 union { 1125 unsigned long long g[2]; 1126 unsigned char b[16]; 1127 } icv; 1128 unsigned char k[32]; 1129 } kmac_param; 1130 /* KMAC-AES parameter block - end */ 1131 1132 union { 1133 unsigned long long g[2]; 1134 unsigned char b[16]; 1135 } nonce; 1136 union { 1137 unsigned long long g[2]; 1138 unsigned char b[16]; 1139 } buf; 1140 1141 unsigned long long blocks; 1142 int l; 1143 int m; 1144 int tls_aad_len; 1145 int iv_set; 1146 int tag_set; 1147 int len_set; 1148 int key_set; 1149 1150 unsigned char pad[140]; 1151 unsigned int fc; 1152 } ccm; 1153 } aes; 1154 } S390X_AES_CCM_CTX; 1155 1156 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */ 1157 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6)) 1158 1159 /* Most modes of operation need km for partial block processing. */ 1160 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \ 1161 S390X_CAPBIT(S390X_AES_128)) 1162 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \ 1163 S390X_CAPBIT(S390X_AES_192)) 1164 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \ 1165 S390X_CAPBIT(S390X_AES_256)) 1166 1167 # define s390x_aes_init_key aes_init_key 1168 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 1169 const unsigned char *iv, int enc); 1170 1171 # define S390X_aes_128_cbc_CAPABLE 0 /* checked by callee */ 1172 # define S390X_aes_192_cbc_CAPABLE 0 1173 # define S390X_aes_256_cbc_CAPABLE 0 1174 # define S390X_AES_CBC_CTX EVP_AES_KEY 1175 1176 # define s390x_aes_cbc_init_key aes_init_key 1177 1178 # define s390x_aes_cbc_cipher aes_cbc_cipher 1179 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1180 const unsigned char *in, size_t len); 1181 1182 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE 1183 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE 1184 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE 1185 1186 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx, 1187 const unsigned char *key, 1188 const unsigned char *iv, int enc) 1189 { 1190 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx); 1191 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1192 1193 cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT); 1194 1195 if (key != NULL) 1196 memcpy(cctx->km.param.k, key, keylen); 1197 1198 return 1; 1199 } 1200 1201 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1202 const unsigned char *in, size_t len) 1203 { 1204 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx); 1205 1206 s390x_km(in, len, out, cctx->fc, &cctx->km.param); 1207 return 1; 1208 } 1209 1210 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \ 1211 (OPENSSL_s390xcap_P.kmo[0] & \ 1212 S390X_CAPBIT(S390X_AES_128))) 1213 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \ 1214 (OPENSSL_s390xcap_P.kmo[0] & \ 1215 S390X_CAPBIT(S390X_AES_192))) 1216 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \ 1217 (OPENSSL_s390xcap_P.kmo[0] & \ 1218 S390X_CAPBIT(S390X_AES_256))) 1219 1220 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx, 1221 const unsigned char *key, 1222 const unsigned char *ivec, int enc) 1223 { 1224 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx); 1225 const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx); 1226 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1227 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1228 1229 cctx->fc = S390X_AES_FC(keylen); 1230 1231 if (key != NULL) 1232 memcpy(cctx->kmo.param.k, key, keylen); 1233 1234 cctx->res = 0; 1235 memcpy(cctx->kmo.param.cv, oiv, ivlen); 1236 return 1; 1237 } 1238 1239 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1240 const unsigned char *in, size_t len) 1241 { 1242 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx); 1243 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1244 unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx); 1245 int n = cctx->res; 1246 int rem; 1247 1248 memcpy(cctx->kmo.param.cv, iv, ivlen); 1249 while (n && len) { 1250 *out = *in ^ cctx->kmo.param.cv[n]; 1251 n = (n + 1) & 0xf; 1252 --len; 1253 ++in; 1254 ++out; 1255 } 1256 1257 rem = len & 0xf; 1258 1259 len &= ~(size_t)0xf; 1260 if (len) { 1261 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param); 1262 1263 out += len; 1264 in += len; 1265 } 1266 1267 if (rem) { 1268 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc, 1269 cctx->kmo.param.k); 1270 1271 while (rem--) { 1272 out[n] = in[n] ^ cctx->kmo.param.cv[n]; 1273 ++n; 1274 } 1275 } 1276 1277 memcpy(iv, cctx->kmo.param.cv, ivlen); 1278 cctx->res = n; 1279 return 1; 1280 } 1281 1282 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \ 1283 (OPENSSL_s390xcap_P.kmf[0] & \ 1284 S390X_CAPBIT(S390X_AES_128))) 1285 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \ 1286 (OPENSSL_s390xcap_P.kmf[0] & \ 1287 S390X_CAPBIT(S390X_AES_192))) 1288 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \ 1289 (OPENSSL_s390xcap_P.kmf[0] & \ 1290 S390X_CAPBIT(S390X_AES_256))) 1291 1292 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx, 1293 const unsigned char *key, 1294 const unsigned char *ivec, int enc) 1295 { 1296 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1297 const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx); 1298 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1299 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1300 1301 cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT) 1302 | (16 << 24); /* 16 bytes cipher feedback */ 1303 1304 if (key != NULL) 1305 memcpy(cctx->kmf.param.k, key, keylen); 1306 1307 cctx->res = 0; 1308 memcpy(cctx->kmf.param.cv, oiv, ivlen); 1309 return 1; 1310 } 1311 1312 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1313 const unsigned char *in, size_t len) 1314 { 1315 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1316 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1317 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 1318 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1319 unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx); 1320 int n = cctx->res; 1321 int rem; 1322 unsigned char tmp; 1323 1324 memcpy(cctx->kmf.param.cv, iv, ivlen); 1325 while (n && len) { 1326 tmp = *in; 1327 *out = cctx->kmf.param.cv[n] ^ tmp; 1328 cctx->kmf.param.cv[n] = enc ? *out : tmp; 1329 n = (n + 1) & 0xf; 1330 --len; 1331 ++in; 1332 ++out; 1333 } 1334 1335 rem = len & 0xf; 1336 1337 len &= ~(size_t)0xf; 1338 if (len) { 1339 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param); 1340 1341 out += len; 1342 in += len; 1343 } 1344 1345 if (rem) { 1346 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv, 1347 S390X_AES_FC(keylen), cctx->kmf.param.k); 1348 1349 while (rem--) { 1350 tmp = in[n]; 1351 out[n] = cctx->kmf.param.cv[n] ^ tmp; 1352 cctx->kmf.param.cv[n] = enc ? out[n] : tmp; 1353 ++n; 1354 } 1355 } 1356 1357 memcpy(iv, cctx->kmf.param.cv, ivlen); 1358 cctx->res = n; 1359 return 1; 1360 } 1361 1362 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \ 1363 S390X_CAPBIT(S390X_AES_128)) 1364 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \ 1365 S390X_CAPBIT(S390X_AES_192)) 1366 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \ 1367 S390X_CAPBIT(S390X_AES_256)) 1368 1369 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx, 1370 const unsigned char *key, 1371 const unsigned char *ivec, int enc) 1372 { 1373 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1374 const unsigned char *oiv = EVP_CIPHER_CTX_original_iv(ctx); 1375 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1376 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1377 1378 cctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT) 1379 | (1 << 24); /* 1 byte cipher feedback flag */ 1380 1381 if (key != NULL) 1382 memcpy(cctx->kmf.param.k, key, keylen); 1383 1384 cctx->res = 0; 1385 memcpy(cctx->kmf.param.cv, oiv, ivlen); 1386 return 1; 1387 } 1388 1389 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1390 const unsigned char *in, size_t len) 1391 { 1392 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1393 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1394 unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx); 1395 1396 memcpy(cctx->kmf.param.cv, iv, ivlen); 1397 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param); 1398 memcpy(iv, cctx->kmf.param.cv, ivlen); 1399 return 1; 1400 } 1401 1402 # define S390X_aes_128_cfb1_CAPABLE 0 1403 # define S390X_aes_192_cfb1_CAPABLE 0 1404 # define S390X_aes_256_cfb1_CAPABLE 0 1405 1406 # define s390x_aes_cfb1_init_key aes_init_key 1407 1408 # define s390x_aes_cfb1_cipher aes_cfb1_cipher 1409 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1410 const unsigned char *in, size_t len); 1411 1412 # define S390X_aes_128_ctr_CAPABLE 0 /* checked by callee */ 1413 # define S390X_aes_192_ctr_CAPABLE 0 1414 # define S390X_aes_256_ctr_CAPABLE 0 1415 # define S390X_AES_CTR_CTX EVP_AES_KEY 1416 1417 # define s390x_aes_ctr_init_key aes_init_key 1418 1419 # define s390x_aes_ctr_cipher aes_ctr_cipher 1420 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1421 const unsigned char *in, size_t len); 1422 1423 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \ 1424 (OPENSSL_s390xcap_P.kma[0] & \ 1425 S390X_CAPBIT(S390X_AES_128))) 1426 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \ 1427 (OPENSSL_s390xcap_P.kma[0] & \ 1428 S390X_CAPBIT(S390X_AES_192))) 1429 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \ 1430 (OPENSSL_s390xcap_P.kma[0] & \ 1431 S390X_CAPBIT(S390X_AES_256))) 1432 1433 /* iv + padding length for iv lengths != 12 */ 1434 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16) 1435 1436 /*- 1437 * Process additional authenticated data. Returns 0 on success. Code is 1438 * big-endian. 1439 */ 1440 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad, 1441 size_t len) 1442 { 1443 unsigned long long alen; 1444 int n, rem; 1445 1446 if (ctx->kma.param.tpcl) 1447 return -2; 1448 1449 alen = ctx->kma.param.taadl + len; 1450 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len)) 1451 return -1; 1452 ctx->kma.param.taadl = alen; 1453 1454 n = ctx->areslen; 1455 if (n) { 1456 while (n && len) { 1457 ctx->ares[n] = *aad; 1458 n = (n + 1) & 0xf; 1459 ++aad; 1460 --len; 1461 } 1462 /* ctx->ares contains a complete block if offset has wrapped around */ 1463 if (!n) { 1464 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param); 1465 ctx->fc |= S390X_KMA_HS; 1466 } 1467 ctx->areslen = n; 1468 } 1469 1470 rem = len & 0xf; 1471 1472 len &= ~(size_t)0xf; 1473 if (len) { 1474 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param); 1475 aad += len; 1476 ctx->fc |= S390X_KMA_HS; 1477 } 1478 1479 if (rem) { 1480 ctx->areslen = rem; 1481 1482 do { 1483 --rem; 1484 ctx->ares[rem] = aad[rem]; 1485 } while (rem); 1486 } 1487 return 0; 1488 } 1489 1490 /*- 1491 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for 1492 * success. Code is big-endian. 1493 */ 1494 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in, 1495 unsigned char *out, size_t len) 1496 { 1497 const unsigned char *inptr; 1498 unsigned long long mlen; 1499 union { 1500 unsigned int w[4]; 1501 unsigned char b[16]; 1502 } buf; 1503 size_t inlen; 1504 int n, rem, i; 1505 1506 mlen = ctx->kma.param.tpcl + len; 1507 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len)) 1508 return -1; 1509 ctx->kma.param.tpcl = mlen; 1510 1511 n = ctx->mreslen; 1512 if (n) { 1513 inptr = in; 1514 inlen = len; 1515 while (n && inlen) { 1516 ctx->mres[n] = *inptr; 1517 n = (n + 1) & 0xf; 1518 ++inptr; 1519 --inlen; 1520 } 1521 /* ctx->mres contains a complete block if offset has wrapped around */ 1522 if (!n) { 1523 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b, 1524 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param); 1525 ctx->fc |= S390X_KMA_HS; 1526 ctx->areslen = 0; 1527 1528 /* previous call already encrypted/decrypted its remainder, 1529 * see comment below */ 1530 n = ctx->mreslen; 1531 while (n) { 1532 *out = buf.b[n]; 1533 n = (n + 1) & 0xf; 1534 ++out; 1535 ++in; 1536 --len; 1537 } 1538 ctx->mreslen = 0; 1539 } 1540 } 1541 1542 rem = len & 0xf; 1543 1544 len &= ~(size_t)0xf; 1545 if (len) { 1546 s390x_kma(ctx->ares, ctx->areslen, in, len, out, 1547 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param); 1548 in += len; 1549 out += len; 1550 ctx->fc |= S390X_KMA_HS; 1551 ctx->areslen = 0; 1552 } 1553 1554 /*- 1555 * If there is a remainder, it has to be saved such that it can be 1556 * processed by kma later. However, we also have to do the for-now 1557 * unauthenticated encryption/decryption part here and now... 1558 */ 1559 if (rem) { 1560 if (!ctx->mreslen) { 1561 buf.w[0] = ctx->kma.param.j0.w[0]; 1562 buf.w[1] = ctx->kma.param.j0.w[1]; 1563 buf.w[2] = ctx->kma.param.j0.w[2]; 1564 buf.w[3] = ctx->kma.param.cv.w + 1; 1565 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k); 1566 } 1567 1568 n = ctx->mreslen; 1569 for (i = 0; i < rem; i++) { 1570 ctx->mres[n + i] = in[i]; 1571 out[i] = in[i] ^ ctx->kres[n + i]; 1572 } 1573 1574 ctx->mreslen += rem; 1575 } 1576 return 0; 1577 } 1578 1579 /*- 1580 * Initialize context structure. Code is big-endian. 1581 */ 1582 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx) 1583 { 1584 ctx->kma.param.t.g[0] = 0; 1585 ctx->kma.param.t.g[1] = 0; 1586 ctx->kma.param.tpcl = 0; 1587 ctx->kma.param.taadl = 0; 1588 ctx->mreslen = 0; 1589 ctx->areslen = 0; 1590 ctx->kreslen = 0; 1591 1592 if (ctx->ivlen == 12) { 1593 memcpy(&ctx->kma.param.j0, ctx->iv, ctx->ivlen); 1594 ctx->kma.param.j0.w[3] = 1; 1595 ctx->kma.param.cv.w = 1; 1596 } else { 1597 /* ctx->iv has the right size and is already padded. */ 1598 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL, 1599 ctx->fc, &ctx->kma.param); 1600 ctx->fc |= S390X_KMA_HS; 1601 1602 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0]; 1603 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1]; 1604 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3]; 1605 ctx->kma.param.t.g[0] = 0; 1606 ctx->kma.param.t.g[1] = 0; 1607 } 1608 } 1609 1610 /*- 1611 * Performs various operations on the context structure depending on control 1612 * type. Returns 1 for success, 0 for failure and -1 for unknown control type. 1613 * Code is big-endian. 1614 */ 1615 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 1616 { 1617 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c); 1618 S390X_AES_GCM_CTX *gctx_out; 1619 EVP_CIPHER_CTX *out; 1620 unsigned char *buf, *iv; 1621 int ivlen, enc, len; 1622 1623 switch (type) { 1624 case EVP_CTRL_INIT: 1625 ivlen = EVP_CIPHER_iv_length(c->cipher); 1626 iv = EVP_CIPHER_CTX_iv_noconst(c); 1627 gctx->key_set = 0; 1628 gctx->iv_set = 0; 1629 gctx->ivlen = ivlen; 1630 gctx->iv = iv; 1631 gctx->taglen = -1; 1632 gctx->iv_gen = 0; 1633 gctx->tls_aad_len = -1; 1634 return 1; 1635 1636 case EVP_CTRL_GET_IVLEN: 1637 *(int *)ptr = gctx->ivlen; 1638 return 1; 1639 1640 case EVP_CTRL_AEAD_SET_IVLEN: 1641 if (arg <= 0) 1642 return 0; 1643 1644 if (arg != 12) { 1645 iv = EVP_CIPHER_CTX_iv_noconst(c); 1646 len = S390X_gcm_ivpadlen(arg); 1647 1648 /* Allocate memory for iv if needed. */ 1649 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) { 1650 if (gctx->iv != iv) 1651 OPENSSL_free(gctx->iv); 1652 1653 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) { 1654 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 1655 return 0; 1656 } 1657 } 1658 /* Add padding. */ 1659 memset(gctx->iv + arg, 0, len - arg - 8); 1660 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3; 1661 } 1662 gctx->ivlen = arg; 1663 return 1; 1664 1665 case EVP_CTRL_AEAD_SET_TAG: 1666 buf = EVP_CIPHER_CTX_buf_noconst(c); 1667 enc = EVP_CIPHER_CTX_encrypting(c); 1668 if (arg <= 0 || arg > 16 || enc) 1669 return 0; 1670 1671 memcpy(buf, ptr, arg); 1672 gctx->taglen = arg; 1673 return 1; 1674 1675 case EVP_CTRL_AEAD_GET_TAG: 1676 enc = EVP_CIPHER_CTX_encrypting(c); 1677 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0) 1678 return 0; 1679 1680 memcpy(ptr, gctx->kma.param.t.b, arg); 1681 return 1; 1682 1683 case EVP_CTRL_GCM_SET_IV_FIXED: 1684 /* Special case: -1 length restores whole iv */ 1685 if (arg == -1) { 1686 memcpy(gctx->iv, ptr, gctx->ivlen); 1687 gctx->iv_gen = 1; 1688 return 1; 1689 } 1690 /* 1691 * Fixed field must be at least 4 bytes and invocation field at least 1692 * 8. 1693 */ 1694 if ((arg < 4) || (gctx->ivlen - arg) < 8) 1695 return 0; 1696 1697 if (arg) 1698 memcpy(gctx->iv, ptr, arg); 1699 1700 enc = EVP_CIPHER_CTX_encrypting(c); 1701 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) 1702 return 0; 1703 1704 gctx->iv_gen = 1; 1705 return 1; 1706 1707 case EVP_CTRL_GCM_IV_GEN: 1708 if (gctx->iv_gen == 0 || gctx->key_set == 0) 1709 return 0; 1710 1711 s390x_aes_gcm_setiv(gctx); 1712 1713 if (arg <= 0 || arg > gctx->ivlen) 1714 arg = gctx->ivlen; 1715 1716 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); 1717 /* 1718 * Invocation field will be at least 8 bytes in size and so no need 1719 * to check wrap around or increment more than last 8 bytes. 1720 */ 1721 ctr64_inc(gctx->iv + gctx->ivlen - 8); 1722 gctx->iv_set = 1; 1723 return 1; 1724 1725 case EVP_CTRL_GCM_SET_IV_INV: 1726 enc = EVP_CIPHER_CTX_encrypting(c); 1727 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc) 1728 return 0; 1729 1730 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); 1731 s390x_aes_gcm_setiv(gctx); 1732 gctx->iv_set = 1; 1733 return 1; 1734 1735 case EVP_CTRL_AEAD_TLS1_AAD: 1736 /* Save the aad for later use. */ 1737 if (arg != EVP_AEAD_TLS1_AAD_LEN) 1738 return 0; 1739 1740 buf = EVP_CIPHER_CTX_buf_noconst(c); 1741 memcpy(buf, ptr, arg); 1742 gctx->tls_aad_len = arg; 1743 1744 len = buf[arg - 2] << 8 | buf[arg - 1]; 1745 /* Correct length for explicit iv. */ 1746 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN) 1747 return 0; 1748 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN; 1749 1750 /* If decrypting correct for tag too. */ 1751 enc = EVP_CIPHER_CTX_encrypting(c); 1752 if (!enc) { 1753 if (len < EVP_GCM_TLS_TAG_LEN) 1754 return 0; 1755 len -= EVP_GCM_TLS_TAG_LEN; 1756 } 1757 buf[arg - 2] = len >> 8; 1758 buf[arg - 1] = len & 0xff; 1759 /* Extra padding: tag appended to record. */ 1760 return EVP_GCM_TLS_TAG_LEN; 1761 1762 case EVP_CTRL_COPY: 1763 out = ptr; 1764 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out); 1765 iv = EVP_CIPHER_CTX_iv_noconst(c); 1766 1767 if (gctx->iv == iv) { 1768 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out); 1769 } else { 1770 len = S390X_gcm_ivpadlen(gctx->ivlen); 1771 1772 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) { 1773 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 1774 return 0; 1775 } 1776 1777 memcpy(gctx_out->iv, gctx->iv, len); 1778 } 1779 return 1; 1780 1781 default: 1782 return -1; 1783 } 1784 } 1785 1786 /*- 1787 * Set key or iv or enc/dec. Returns 1 on success. Otherwise 0 is returned. 1788 */ 1789 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx, 1790 const unsigned char *key, 1791 const unsigned char *iv, int enc) 1792 { 1793 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); 1794 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1795 1796 gctx->fc = S390X_AES_FC(keylen) | (enc ? 0 : S390X_DECRYPT); 1797 1798 if (key != NULL) { 1799 gctx->fc &= ~S390X_KMA_HS; 1800 memcpy(&gctx->kma.param.k, key, keylen); 1801 gctx->key_set = 1; 1802 } 1803 1804 if (iv != NULL) { 1805 memcpy(gctx->iv, iv, gctx->ivlen); 1806 gctx->iv_gen = 0; 1807 gctx->iv_set = 1; 1808 } 1809 1810 if (gctx->key_set && gctx->iv_set) 1811 s390x_aes_gcm_setiv(gctx); 1812 1813 gctx->fc &= ~(S390X_KMA_LPC | S390X_KMA_LAAD); 1814 gctx->areslen = 0; 1815 gctx->mreslen = 0; 1816 gctx->kreslen = 0; 1817 return 1; 1818 } 1819 1820 /*- 1821 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written 1822 * if successful. Otherwise -1 is returned. Code is big-endian. 1823 */ 1824 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1825 const unsigned char *in, size_t len) 1826 { 1827 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); 1828 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx); 1829 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 1830 int rv = -1; 1831 1832 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN)) 1833 return -1; 1834 1835 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN 1836 : EVP_CTRL_GCM_SET_IV_INV, 1837 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0) 1838 goto err; 1839 1840 in += EVP_GCM_TLS_EXPLICIT_IV_LEN; 1841 out += EVP_GCM_TLS_EXPLICIT_IV_LEN; 1842 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 1843 1844 gctx->kma.param.taadl = gctx->tls_aad_len << 3; 1845 gctx->kma.param.tpcl = len << 3; 1846 s390x_kma(buf, gctx->tls_aad_len, in, len, out, 1847 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param); 1848 1849 if (enc) { 1850 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN); 1851 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 1852 } else { 1853 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len, 1854 EVP_GCM_TLS_TAG_LEN)) { 1855 OPENSSL_cleanse(out, len); 1856 goto err; 1857 } 1858 rv = len; 1859 } 1860 err: 1861 gctx->iv_set = 0; 1862 gctx->tls_aad_len = -1; 1863 return rv; 1864 } 1865 1866 /*- 1867 * Called from EVP layer to initialize context, process additional 1868 * authenticated data, en/de-crypt plain/cipher-text and authenticate 1869 * ciphertext or process a TLS packet, depending on context. Returns bytes 1870 * written on success. Otherwise -1 is returned. Code is big-endian. 1871 */ 1872 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1873 const unsigned char *in, size_t len) 1874 { 1875 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); 1876 unsigned char *buf, tmp[16]; 1877 int enc; 1878 1879 if (!gctx->key_set) 1880 return -1; 1881 1882 if (gctx->tls_aad_len >= 0) 1883 return s390x_aes_gcm_tls_cipher(ctx, out, in, len); 1884 1885 if (!gctx->iv_set) 1886 return -1; 1887 1888 if (in != NULL) { 1889 if (out == NULL) { 1890 if (s390x_aes_gcm_aad(gctx, in, len)) 1891 return -1; 1892 } else { 1893 if (s390x_aes_gcm(gctx, in, out, len)) 1894 return -1; 1895 } 1896 return len; 1897 } else { 1898 gctx->kma.param.taadl <<= 3; 1899 gctx->kma.param.tpcl <<= 3; 1900 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp, 1901 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param); 1902 /* recall that we already did en-/decrypt gctx->mres 1903 * and returned it to caller... */ 1904 OPENSSL_cleanse(tmp, gctx->mreslen); 1905 1906 enc = EVP_CIPHER_CTX_encrypting(ctx); 1907 if (enc) { 1908 gctx->taglen = 16; 1909 } else { 1910 if (gctx->taglen < 0) 1911 return -1; 1912 1913 buf = EVP_CIPHER_CTX_buf_noconst(ctx); 1914 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen)) 1915 return -1; 1916 } 1917 return 0; 1918 } 1919 } 1920 1921 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c) 1922 { 1923 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c); 1924 const unsigned char *iv; 1925 1926 if (gctx == NULL) 1927 return 0; 1928 1929 iv = EVP_CIPHER_CTX_iv(c); 1930 if (iv != gctx->iv) 1931 OPENSSL_free(gctx->iv); 1932 1933 OPENSSL_cleanse(gctx, sizeof(*gctx)); 1934 return 1; 1935 } 1936 1937 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX 1938 # define S390X_aes_128_xts_CAPABLE 0 /* checked by callee */ 1939 # define S390X_aes_256_xts_CAPABLE 0 1940 1941 # define s390x_aes_xts_init_key aes_xts_init_key 1942 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx, 1943 const unsigned char *key, 1944 const unsigned char *iv, int enc); 1945 # define s390x_aes_xts_cipher aes_xts_cipher 1946 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1947 const unsigned char *in, size_t len); 1948 # define s390x_aes_xts_ctrl aes_xts_ctrl 1949 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr); 1950 # define s390x_aes_xts_cleanup aes_xts_cleanup 1951 1952 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \ 1953 (OPENSSL_s390xcap_P.kmac[0] & \ 1954 S390X_CAPBIT(S390X_AES_128))) 1955 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \ 1956 (OPENSSL_s390xcap_P.kmac[0] & \ 1957 S390X_CAPBIT(S390X_AES_192))) 1958 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \ 1959 (OPENSSL_s390xcap_P.kmac[0] & \ 1960 S390X_CAPBIT(S390X_AES_256))) 1961 1962 # define S390X_CCM_AAD_FLAG 0x40 1963 1964 /*- 1965 * Set nonce and length fields. Code is big-endian. 1966 */ 1967 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx, 1968 const unsigned char *nonce, 1969 size_t mlen) 1970 { 1971 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG; 1972 ctx->aes.ccm.nonce.g[1] = mlen; 1973 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l); 1974 } 1975 1976 /*- 1977 * Process additional authenticated data. Code is big-endian. 1978 */ 1979 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad, 1980 size_t alen) 1981 { 1982 unsigned char *ptr; 1983 int i, rem; 1984 1985 if (!alen) 1986 return; 1987 1988 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG; 1989 1990 /* Suppress 'type-punned pointer dereference' warning. */ 1991 ptr = ctx->aes.ccm.buf.b; 1992 1993 if (alen < ((1 << 16) - (1 << 8))) { 1994 *(uint16_t *)ptr = alen; 1995 i = 2; 1996 } else if (sizeof(alen) == 8 1997 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) { 1998 *(uint16_t *)ptr = 0xffff; 1999 *(uint64_t *)(ptr + 2) = alen; 2000 i = 10; 2001 } else { 2002 *(uint16_t *)ptr = 0xfffe; 2003 *(uint32_t *)(ptr + 2) = alen; 2004 i = 6; 2005 } 2006 2007 while (i < 16 && alen) { 2008 ctx->aes.ccm.buf.b[i] = *aad; 2009 ++aad; 2010 --alen; 2011 ++i; 2012 } 2013 while (i < 16) { 2014 ctx->aes.ccm.buf.b[i] = 0; 2015 ++i; 2016 } 2017 2018 ctx->aes.ccm.kmac_param.icv.g[0] = 0; 2019 ctx->aes.ccm.kmac_param.icv.g[1] = 0; 2020 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc, 2021 &ctx->aes.ccm.kmac_param); 2022 ctx->aes.ccm.blocks += 2; 2023 2024 rem = alen & 0xf; 2025 alen &= ~(size_t)0xf; 2026 if (alen) { 2027 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2028 ctx->aes.ccm.blocks += alen >> 4; 2029 aad += alen; 2030 } 2031 if (rem) { 2032 for (i = 0; i < rem; i++) 2033 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i]; 2034 2035 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2036 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2037 ctx->aes.ccm.kmac_param.k); 2038 ctx->aes.ccm.blocks++; 2039 } 2040 } 2041 2042 /*- 2043 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for 2044 * success. 2045 */ 2046 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in, 2047 unsigned char *out, size_t len, int enc) 2048 { 2049 size_t n, rem; 2050 unsigned int i, l, num; 2051 unsigned char flags; 2052 2053 flags = ctx->aes.ccm.nonce.b[0]; 2054 if (!(flags & S390X_CCM_AAD_FLAG)) { 2055 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b, 2056 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k); 2057 ctx->aes.ccm.blocks++; 2058 } 2059 l = flags & 0x7; 2060 ctx->aes.ccm.nonce.b[0] = l; 2061 2062 /*- 2063 * Reconstruct length from encoded length field 2064 * and initialize it with counter value. 2065 */ 2066 n = 0; 2067 for (i = 15 - l; i < 15; i++) { 2068 n |= ctx->aes.ccm.nonce.b[i]; 2069 ctx->aes.ccm.nonce.b[i] = 0; 2070 n <<= 8; 2071 } 2072 n |= ctx->aes.ccm.nonce.b[15]; 2073 ctx->aes.ccm.nonce.b[15] = 1; 2074 2075 if (n != len) 2076 return -1; /* length mismatch */ 2077 2078 if (enc) { 2079 /* Two operations per block plus one for tag encryption */ 2080 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1; 2081 if (ctx->aes.ccm.blocks > (1ULL << 61)) 2082 return -2; /* too much data */ 2083 } 2084 2085 num = 0; 2086 rem = len & 0xf; 2087 len &= ~(size_t)0xf; 2088 2089 if (enc) { 2090 /* mac-then-encrypt */ 2091 if (len) 2092 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2093 if (rem) { 2094 for (i = 0; i < rem; i++) 2095 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i]; 2096 2097 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2098 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2099 ctx->aes.ccm.kmac_param.k); 2100 } 2101 2102 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k, 2103 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b, 2104 &num, (ctr128_f)AES_ctr32_encrypt); 2105 } else { 2106 /* decrypt-then-mac */ 2107 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k, 2108 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b, 2109 &num, (ctr128_f)AES_ctr32_encrypt); 2110 2111 if (len) 2112 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2113 if (rem) { 2114 for (i = 0; i < rem; i++) 2115 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i]; 2116 2117 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2118 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2119 ctx->aes.ccm.kmac_param.k); 2120 } 2121 } 2122 /* encrypt tag */ 2123 for (i = 15 - l; i < 16; i++) 2124 ctx->aes.ccm.nonce.b[i] = 0; 2125 2126 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc, 2127 ctx->aes.ccm.kmac_param.k); 2128 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0]; 2129 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1]; 2130 2131 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */ 2132 return 0; 2133 } 2134 2135 /*- 2136 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written 2137 * if successful. Otherwise -1 is returned. 2138 */ 2139 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2140 const unsigned char *in, size_t len) 2141 { 2142 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2143 const unsigned char *ivec = EVP_CIPHER_CTX_iv(ctx); 2144 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx); 2145 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 2146 unsigned char iv[EVP_MAX_IV_LENGTH]; 2147 2148 if (out != in 2149 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m)) 2150 return -1; 2151 2152 if (enc) { 2153 /* Set explicit iv (sequence number). */ 2154 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN); 2155 } 2156 2157 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m; 2158 /*- 2159 * Get explicit iv (sequence number). We already have fixed iv 2160 * (server/client_write_iv) here. 2161 */ 2162 memcpy(iv, ivec, sizeof(iv)); 2163 memcpy(iv + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN); 2164 s390x_aes_ccm_setiv(cctx, iv, len); 2165 2166 /* Process aad (sequence number|type|version|length) */ 2167 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len); 2168 2169 in += EVP_CCM_TLS_EXPLICIT_IV_LEN; 2170 out += EVP_CCM_TLS_EXPLICIT_IV_LEN; 2171 2172 if (enc) { 2173 if (s390x_aes_ccm(cctx, in, out, len, enc)) 2174 return -1; 2175 2176 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m); 2177 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m; 2178 } else { 2179 if (!s390x_aes_ccm(cctx, in, out, len, enc)) { 2180 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len, 2181 cctx->aes.ccm.m)) 2182 return len; 2183 } 2184 2185 OPENSSL_cleanse(out, len); 2186 return -1; 2187 } 2188 } 2189 2190 /*- 2191 * Set key or iv or enc/dec. Returns 1 if successful. 2192 * Otherwise 0 is returned. 2193 */ 2194 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx, 2195 const unsigned char *key, 2196 const unsigned char *iv, int enc) 2197 { 2198 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2199 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 2200 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2201 2202 cctx->aes.ccm.fc = S390X_AES_FC(keylen); 2203 2204 if (key != NULL) { 2205 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen); 2206 cctx->aes.ccm.key_set = 1; 2207 } 2208 if (iv != NULL) { 2209 memcpy(ivec, iv, 15 - cctx->aes.ccm.l); 2210 cctx->aes.ccm.iv_set = 1; 2211 } 2212 2213 /* Store encoded m and l. */ 2214 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7) 2215 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3; 2216 memset(cctx->aes.ccm.nonce.b + 1, 0, sizeof(cctx->aes.ccm.nonce.b) - 1); 2217 2218 cctx->aes.ccm.blocks = 0; 2219 cctx->aes.ccm.len_set = 0; 2220 return 1; 2221 } 2222 2223 /*- 2224 * Called from EVP layer to initialize context, process additional 2225 * authenticated data, en/de-crypt plain/cipher-text and authenticate 2226 * plaintext or process a TLS packet, depending on context. Returns bytes 2227 * written on success. Otherwise -1 is returned. 2228 */ 2229 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2230 const unsigned char *in, size_t len) 2231 { 2232 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2233 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 2234 const unsigned char *ivec = EVP_CIPHER_CTX_iv(ctx); 2235 unsigned char *buf; 2236 int rv; 2237 2238 if (!cctx->aes.ccm.key_set) 2239 return -1; 2240 2241 if (cctx->aes.ccm.tls_aad_len >= 0) 2242 return s390x_aes_ccm_tls_cipher(ctx, out, in, len); 2243 2244 /*- 2245 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt 2246 * so integrity must be checked already at Update() i.e., before 2247 * potentially corrupted data is output. 2248 */ 2249 if (in == NULL && out != NULL) 2250 return 0; 2251 2252 if (!cctx->aes.ccm.iv_set) 2253 return -1; 2254 2255 if (out == NULL) { 2256 /* Update(): Pass message length. */ 2257 if (in == NULL) { 2258 s390x_aes_ccm_setiv(cctx, ivec, len); 2259 2260 cctx->aes.ccm.len_set = 1; 2261 return len; 2262 } 2263 2264 /* Update(): Process aad. */ 2265 if (!cctx->aes.ccm.len_set && len) 2266 return -1; 2267 2268 s390x_aes_ccm_aad(cctx, in, len); 2269 return len; 2270 } 2271 2272 /* The tag must be set before actually decrypting data */ 2273 if (!enc && !cctx->aes.ccm.tag_set) 2274 return -1; 2275 2276 /* Update(): Process message. */ 2277 2278 if (!cctx->aes.ccm.len_set) { 2279 /*- 2280 * In case message length was not previously set explicitly via 2281 * Update(), set it now. 2282 */ 2283 s390x_aes_ccm_setiv(cctx, ivec, len); 2284 2285 cctx->aes.ccm.len_set = 1; 2286 } 2287 2288 if (enc) { 2289 if (s390x_aes_ccm(cctx, in, out, len, enc)) 2290 return -1; 2291 2292 cctx->aes.ccm.tag_set = 1; 2293 return len; 2294 } else { 2295 rv = -1; 2296 2297 if (!s390x_aes_ccm(cctx, in, out, len, enc)) { 2298 buf = EVP_CIPHER_CTX_buf_noconst(ctx); 2299 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf, 2300 cctx->aes.ccm.m)) 2301 rv = len; 2302 } 2303 2304 if (rv == -1) 2305 OPENSSL_cleanse(out, len); 2306 2307 return rv; 2308 } 2309 } 2310 2311 /*- 2312 * Performs various operations on the context structure depending on control 2313 * type. Returns 1 for success, 0 for failure and -1 for unknown control type. 2314 * Code is big-endian. 2315 */ 2316 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 2317 { 2318 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c); 2319 unsigned char *buf, *iv; 2320 int enc, len; 2321 2322 switch (type) { 2323 case EVP_CTRL_INIT: 2324 cctx->aes.ccm.key_set = 0; 2325 cctx->aes.ccm.iv_set = 0; 2326 cctx->aes.ccm.l = 8; 2327 cctx->aes.ccm.m = 12; 2328 cctx->aes.ccm.tag_set = 0; 2329 cctx->aes.ccm.len_set = 0; 2330 cctx->aes.ccm.tls_aad_len = -1; 2331 return 1; 2332 2333 case EVP_CTRL_GET_IVLEN: 2334 *(int *)ptr = 15 - cctx->aes.ccm.l; 2335 return 1; 2336 2337 case EVP_CTRL_AEAD_TLS1_AAD: 2338 if (arg != EVP_AEAD_TLS1_AAD_LEN) 2339 return 0; 2340 2341 /* Save the aad for later use. */ 2342 buf = EVP_CIPHER_CTX_buf_noconst(c); 2343 memcpy(buf, ptr, arg); 2344 cctx->aes.ccm.tls_aad_len = arg; 2345 2346 len = buf[arg - 2] << 8 | buf[arg - 1]; 2347 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN) 2348 return 0; 2349 2350 /* Correct length for explicit iv. */ 2351 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN; 2352 2353 enc = EVP_CIPHER_CTX_encrypting(c); 2354 if (!enc) { 2355 if (len < cctx->aes.ccm.m) 2356 return 0; 2357 2358 /* Correct length for tag. */ 2359 len -= cctx->aes.ccm.m; 2360 } 2361 2362 buf[arg - 2] = len >> 8; 2363 buf[arg - 1] = len & 0xff; 2364 2365 /* Extra padding: tag appended to record. */ 2366 return cctx->aes.ccm.m; 2367 2368 case EVP_CTRL_CCM_SET_IV_FIXED: 2369 if (arg != EVP_CCM_TLS_FIXED_IV_LEN) 2370 return 0; 2371 2372 /* Copy to first part of the iv. */ 2373 iv = EVP_CIPHER_CTX_iv_noconst(c); 2374 memcpy(iv, ptr, arg); 2375 return 1; 2376 2377 case EVP_CTRL_AEAD_SET_IVLEN: 2378 arg = 15 - arg; 2379 /* fall-through */ 2380 2381 case EVP_CTRL_CCM_SET_L: 2382 if (arg < 2 || arg > 8) 2383 return 0; 2384 2385 cctx->aes.ccm.l = arg; 2386 return 1; 2387 2388 case EVP_CTRL_AEAD_SET_TAG: 2389 if ((arg & 1) || arg < 4 || arg > 16) 2390 return 0; 2391 2392 enc = EVP_CIPHER_CTX_encrypting(c); 2393 if (enc && ptr) 2394 return 0; 2395 2396 if (ptr) { 2397 cctx->aes.ccm.tag_set = 1; 2398 buf = EVP_CIPHER_CTX_buf_noconst(c); 2399 memcpy(buf, ptr, arg); 2400 } 2401 2402 cctx->aes.ccm.m = arg; 2403 return 1; 2404 2405 case EVP_CTRL_AEAD_GET_TAG: 2406 enc = EVP_CIPHER_CTX_encrypting(c); 2407 if (!enc || !cctx->aes.ccm.tag_set) 2408 return 0; 2409 2410 if(arg < cctx->aes.ccm.m) 2411 return 0; 2412 2413 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m); 2414 return 1; 2415 2416 case EVP_CTRL_COPY: 2417 return 1; 2418 2419 default: 2420 return -1; 2421 } 2422 } 2423 2424 # define s390x_aes_ccm_cleanup aes_ccm_cleanup 2425 2426 # ifndef OPENSSL_NO_OCB 2427 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX 2428 # define S390X_aes_128_ocb_CAPABLE 0 2429 # define S390X_aes_192_ocb_CAPABLE 0 2430 # define S390X_aes_256_ocb_CAPABLE 0 2431 2432 # define s390x_aes_ocb_init_key aes_ocb_init_key 2433 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2434 const unsigned char *iv, int enc); 2435 # define s390x_aes_ocb_cipher aes_ocb_cipher 2436 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2437 const unsigned char *in, size_t len); 2438 # define s390x_aes_ocb_cleanup aes_ocb_cleanup 2439 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *); 2440 # define s390x_aes_ocb_ctrl aes_ocb_ctrl 2441 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr); 2442 # endif 2443 2444 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \ 2445 MODE,flags) \ 2446 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \ 2447 nid##_##keylen##_##nmode,blocksize, \ 2448 keylen / 8, \ 2449 ivlen, \ 2450 flags | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_##MODE##_MODE, \ 2451 s390x_aes_##mode##_init_key, \ 2452 s390x_aes_##mode##_cipher, \ 2453 NULL, \ 2454 sizeof(S390X_AES_##MODE##_CTX), \ 2455 NULL, \ 2456 NULL, \ 2457 NULL, \ 2458 NULL \ 2459 }; \ 2460 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2461 nid##_##keylen##_##nmode, \ 2462 blocksize, \ 2463 keylen / 8, \ 2464 ivlen, \ 2465 flags | EVP_CIPH_##MODE##_MODE, \ 2466 aes_init_key, \ 2467 aes_##mode##_cipher, \ 2468 NULL, \ 2469 sizeof(EVP_AES_KEY), \ 2470 NULL, \ 2471 NULL, \ 2472 NULL, \ 2473 NULL \ 2474 }; \ 2475 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2476 { \ 2477 return S390X_aes_##keylen##_##mode##_CAPABLE ? \ 2478 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \ 2479 } 2480 2481 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\ 2482 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \ 2483 nid##_##keylen##_##mode, \ 2484 blocksize, \ 2485 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \ 2486 ivlen, \ 2487 flags | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_##MODE##_MODE, \ 2488 s390x_aes_##mode##_init_key, \ 2489 s390x_aes_##mode##_cipher, \ 2490 s390x_aes_##mode##_cleanup, \ 2491 sizeof(S390X_AES_##MODE##_CTX), \ 2492 NULL, \ 2493 NULL, \ 2494 s390x_aes_##mode##_ctrl, \ 2495 NULL \ 2496 }; \ 2497 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2498 nid##_##keylen##_##mode,blocksize, \ 2499 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \ 2500 ivlen, \ 2501 flags | EVP_CIPH_##MODE##_MODE, \ 2502 aes_##mode##_init_key, \ 2503 aes_##mode##_cipher, \ 2504 aes_##mode##_cleanup, \ 2505 sizeof(EVP_AES_##MODE##_CTX), \ 2506 NULL, \ 2507 NULL, \ 2508 aes_##mode##_ctrl, \ 2509 NULL \ 2510 }; \ 2511 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2512 { \ 2513 return S390X_aes_##keylen##_##mode##_CAPABLE ? \ 2514 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \ 2515 } 2516 2517 #else 2518 2519 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 2520 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2521 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 2522 flags|EVP_CIPH_##MODE##_MODE, \ 2523 aes_init_key, \ 2524 aes_##mode##_cipher, \ 2525 NULL, \ 2526 sizeof(EVP_AES_KEY), \ 2527 NULL,NULL,NULL,NULL }; \ 2528 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2529 { return &aes_##keylen##_##mode; } 2530 2531 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 2532 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2533 nid##_##keylen##_##mode,blocksize, \ 2534 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 2535 flags|EVP_CIPH_##MODE##_MODE, \ 2536 aes_##mode##_init_key, \ 2537 aes_##mode##_cipher, \ 2538 aes_##mode##_cleanup, \ 2539 sizeof(EVP_AES_##MODE##_CTX), \ 2540 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 2541 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2542 { return &aes_##keylen##_##mode; } 2543 2544 #endif 2545 2546 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__)) 2547 # include "arm_arch.h" 2548 # if __ARM_MAX_ARCH__>=7 2549 # if defined(BSAES_ASM) 2550 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON) 2551 # endif 2552 # if defined(VPAES_ASM) 2553 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON) 2554 # endif 2555 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES) 2556 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key 2557 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key 2558 # define HWAES_encrypt aes_v8_encrypt 2559 # define HWAES_decrypt aes_v8_decrypt 2560 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt 2561 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks 2562 # endif 2563 #endif 2564 2565 #if defined(HWAES_CAPABLE) 2566 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits, 2567 AES_KEY *key); 2568 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits, 2569 AES_KEY *key); 2570 void HWAES_encrypt(const unsigned char *in, unsigned char *out, 2571 const AES_KEY *key); 2572 void HWAES_decrypt(const unsigned char *in, unsigned char *out, 2573 const AES_KEY *key); 2574 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out, 2575 size_t length, const AES_KEY *key, 2576 unsigned char *ivec, const int enc); 2577 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out, 2578 size_t len, const AES_KEY *key, 2579 const unsigned char ivec[16]); 2580 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out, 2581 size_t len, const AES_KEY *key1, 2582 const AES_KEY *key2, const unsigned char iv[16]); 2583 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out, 2584 size_t len, const AES_KEY *key1, 2585 const AES_KEY *key2, const unsigned char iv[16]); 2586 #endif 2587 2588 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \ 2589 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2590 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2591 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2592 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2593 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \ 2594 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \ 2595 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags) 2596 2597 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2598 const unsigned char *iv, int enc) 2599 { 2600 int ret, mode; 2601 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2602 2603 mode = EVP_CIPHER_CTX_mode(ctx); 2604 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 2605 && !enc) { 2606 #ifdef HWAES_CAPABLE 2607 if (HWAES_CAPABLE) { 2608 ret = HWAES_set_decrypt_key(key, 2609 EVP_CIPHER_CTX_key_length(ctx) * 8, 2610 &dat->ks.ks); 2611 dat->block = (block128_f) HWAES_decrypt; 2612 dat->stream.cbc = NULL; 2613 # ifdef HWAES_cbc_encrypt 2614 if (mode == EVP_CIPH_CBC_MODE) 2615 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt; 2616 # endif 2617 } else 2618 #endif 2619 #ifdef BSAES_CAPABLE 2620 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) { 2621 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2622 &dat->ks.ks); 2623 dat->block = (block128_f) AES_decrypt; 2624 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt; 2625 } else 2626 #endif 2627 #ifdef VPAES_CAPABLE 2628 if (VPAES_CAPABLE) { 2629 ret = vpaes_set_decrypt_key(key, 2630 EVP_CIPHER_CTX_key_length(ctx) * 8, 2631 &dat->ks.ks); 2632 dat->block = (block128_f) vpaes_decrypt; 2633 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2634 (cbc128_f) vpaes_cbc_encrypt : NULL; 2635 } else 2636 #endif 2637 { 2638 ret = AES_set_decrypt_key(key, 2639 EVP_CIPHER_CTX_key_length(ctx) * 8, 2640 &dat->ks.ks); 2641 dat->block = (block128_f) AES_decrypt; 2642 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2643 (cbc128_f) AES_cbc_encrypt : NULL; 2644 } 2645 } else 2646 #ifdef HWAES_CAPABLE 2647 if (HWAES_CAPABLE) { 2648 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2649 &dat->ks.ks); 2650 dat->block = (block128_f) HWAES_encrypt; 2651 dat->stream.cbc = NULL; 2652 # ifdef HWAES_cbc_encrypt 2653 if (mode == EVP_CIPH_CBC_MODE) 2654 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt; 2655 else 2656 # endif 2657 # ifdef HWAES_ctr32_encrypt_blocks 2658 if (mode == EVP_CIPH_CTR_MODE) 2659 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; 2660 else 2661 # endif 2662 (void)0; /* terminate potentially open 'else' */ 2663 } else 2664 #endif 2665 #ifdef BSAES_CAPABLE 2666 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) { 2667 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2668 &dat->ks.ks); 2669 dat->block = (block128_f) AES_encrypt; 2670 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks; 2671 } else 2672 #endif 2673 #ifdef VPAES_CAPABLE 2674 if (VPAES_CAPABLE) { 2675 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2676 &dat->ks.ks); 2677 dat->block = (block128_f) vpaes_encrypt; 2678 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2679 (cbc128_f) vpaes_cbc_encrypt : NULL; 2680 } else 2681 #endif 2682 { 2683 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2684 &dat->ks.ks); 2685 dat->block = (block128_f) AES_encrypt; 2686 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2687 (cbc128_f) AES_cbc_encrypt : NULL; 2688 #ifdef AES_CTR_ASM 2689 if (mode == EVP_CIPH_CTR_MODE) 2690 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt; 2691 #endif 2692 } 2693 2694 if (ret < 0) { 2695 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 2696 return 0; 2697 } 2698 2699 return 1; 2700 } 2701 2702 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2703 const unsigned char *in, size_t len) 2704 { 2705 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2706 2707 if (dat->stream.cbc) 2708 (*dat->stream.cbc) (in, out, len, &dat->ks, 2709 EVP_CIPHER_CTX_iv_noconst(ctx), 2710 EVP_CIPHER_CTX_encrypting(ctx)); 2711 else if (EVP_CIPHER_CTX_encrypting(ctx)) 2712 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, 2713 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block); 2714 else 2715 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, 2716 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block); 2717 2718 return 1; 2719 } 2720 2721 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2722 const unsigned char *in, size_t len) 2723 { 2724 size_t bl = EVP_CIPHER_CTX_block_size(ctx); 2725 size_t i; 2726 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2727 2728 if (len < bl) 2729 return 1; 2730 2731 for (i = 0, len -= bl; i <= len; i += bl) 2732 (*dat->block) (in + i, out + i, &dat->ks); 2733 2734 return 1; 2735 } 2736 2737 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2738 const unsigned char *in, size_t len) 2739 { 2740 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2741 2742 int num = EVP_CIPHER_CTX_num(ctx); 2743 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, 2744 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block); 2745 EVP_CIPHER_CTX_set_num(ctx, num); 2746 return 1; 2747 } 2748 2749 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2750 const unsigned char *in, size_t len) 2751 { 2752 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2753 2754 int num = EVP_CIPHER_CTX_num(ctx); 2755 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, 2756 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2757 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2758 EVP_CIPHER_CTX_set_num(ctx, num); 2759 return 1; 2760 } 2761 2762 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2763 const unsigned char *in, size_t len) 2764 { 2765 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2766 2767 int num = EVP_CIPHER_CTX_num(ctx); 2768 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, 2769 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2770 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2771 EVP_CIPHER_CTX_set_num(ctx, num); 2772 return 1; 2773 } 2774 2775 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2776 const unsigned char *in, size_t len) 2777 { 2778 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2779 2780 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) { 2781 int num = EVP_CIPHER_CTX_num(ctx); 2782 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, 2783 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2784 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2785 EVP_CIPHER_CTX_set_num(ctx, num); 2786 return 1; 2787 } 2788 2789 while (len >= MAXBITCHUNK) { 2790 int num = EVP_CIPHER_CTX_num(ctx); 2791 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks, 2792 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2793 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2794 EVP_CIPHER_CTX_set_num(ctx, num); 2795 len -= MAXBITCHUNK; 2796 out += MAXBITCHUNK; 2797 in += MAXBITCHUNK; 2798 } 2799 if (len) { 2800 int num = EVP_CIPHER_CTX_num(ctx); 2801 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks, 2802 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2803 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2804 EVP_CIPHER_CTX_set_num(ctx, num); 2805 } 2806 2807 return 1; 2808 } 2809 2810 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2811 const unsigned char *in, size_t len) 2812 { 2813 unsigned int num = EVP_CIPHER_CTX_num(ctx); 2814 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2815 2816 if (dat->stream.ctr) 2817 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, 2818 EVP_CIPHER_CTX_iv_noconst(ctx), 2819 EVP_CIPHER_CTX_buf_noconst(ctx), 2820 &num, dat->stream.ctr); 2821 else 2822 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, 2823 EVP_CIPHER_CTX_iv_noconst(ctx), 2824 EVP_CIPHER_CTX_buf_noconst(ctx), &num, 2825 dat->block); 2826 EVP_CIPHER_CTX_set_num(ctx, num); 2827 return 1; 2828 } 2829 2830 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0) 2831 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0) 2832 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0) 2833 2834 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c) 2835 { 2836 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); 2837 if (gctx == NULL) 2838 return 0; 2839 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm)); 2840 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c)) 2841 OPENSSL_free(gctx->iv); 2842 return 1; 2843 } 2844 2845 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 2846 { 2847 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); 2848 switch (type) { 2849 case EVP_CTRL_INIT: 2850 gctx->key_set = 0; 2851 gctx->iv_set = 0; 2852 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher); 2853 gctx->iv = c->iv; 2854 gctx->taglen = -1; 2855 gctx->iv_gen = 0; 2856 gctx->tls_aad_len = -1; 2857 return 1; 2858 2859 case EVP_CTRL_GET_IVLEN: 2860 *(int *)ptr = gctx->ivlen; 2861 return 1; 2862 2863 case EVP_CTRL_AEAD_SET_IVLEN: 2864 if (arg <= 0) 2865 return 0; 2866 /* Allocate memory for IV if needed */ 2867 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) { 2868 if (gctx->iv != c->iv) 2869 OPENSSL_free(gctx->iv); 2870 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) { 2871 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 2872 return 0; 2873 } 2874 } 2875 gctx->ivlen = arg; 2876 return 1; 2877 2878 case EVP_CTRL_AEAD_SET_TAG: 2879 if (arg <= 0 || arg > 16 || c->encrypt) 2880 return 0; 2881 memcpy(c->buf, ptr, arg); 2882 gctx->taglen = arg; 2883 return 1; 2884 2885 case EVP_CTRL_AEAD_GET_TAG: 2886 if (arg <= 0 || arg > 16 || !c->encrypt 2887 || gctx->taglen < 0) 2888 return 0; 2889 memcpy(ptr, c->buf, arg); 2890 return 1; 2891 2892 case EVP_CTRL_GET_IV: 2893 if (gctx->iv_gen != 1) 2894 return 0; 2895 if (gctx->ivlen != arg) 2896 return 0; 2897 memcpy(ptr, gctx->iv, arg); 2898 return 1; 2899 2900 case EVP_CTRL_GCM_SET_IV_FIXED: 2901 /* Special case: -1 length restores whole IV */ 2902 if (arg == -1) { 2903 memcpy(gctx->iv, ptr, gctx->ivlen); 2904 gctx->iv_gen = 1; 2905 return 1; 2906 } 2907 /* 2908 * Fixed field must be at least 4 bytes and invocation field at least 2909 * 8. 2910 */ 2911 if ((arg < 4) || (gctx->ivlen - arg) < 8) 2912 return 0; 2913 if (arg) 2914 memcpy(gctx->iv, ptr, arg); 2915 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) 2916 return 0; 2917 gctx->iv_gen = 1; 2918 return 1; 2919 2920 case EVP_CTRL_GCM_IV_GEN: 2921 if (gctx->iv_gen == 0 || gctx->key_set == 0) 2922 return 0; 2923 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); 2924 if (arg <= 0 || arg > gctx->ivlen) 2925 arg = gctx->ivlen; 2926 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); 2927 /* 2928 * Invocation field will be at least 8 bytes in size and so no need 2929 * to check wrap around or increment more than last 8 bytes. 2930 */ 2931 ctr64_inc(gctx->iv + gctx->ivlen - 8); 2932 gctx->iv_set = 1; 2933 return 1; 2934 2935 case EVP_CTRL_GCM_SET_IV_INV: 2936 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) 2937 return 0; 2938 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); 2939 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); 2940 gctx->iv_set = 1; 2941 return 1; 2942 2943 case EVP_CTRL_AEAD_TLS1_AAD: 2944 /* Save the AAD for later use */ 2945 if (arg != EVP_AEAD_TLS1_AAD_LEN) 2946 return 0; 2947 memcpy(c->buf, ptr, arg); 2948 gctx->tls_aad_len = arg; 2949 { 2950 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1]; 2951 /* Correct length for explicit IV */ 2952 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN) 2953 return 0; 2954 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN; 2955 /* If decrypting correct for tag too */ 2956 if (!c->encrypt) { 2957 if (len < EVP_GCM_TLS_TAG_LEN) 2958 return 0; 2959 len -= EVP_GCM_TLS_TAG_LEN; 2960 } 2961 c->buf[arg - 2] = len >> 8; 2962 c->buf[arg - 1] = len & 0xff; 2963 } 2964 /* Extra padding: tag appended to record */ 2965 return EVP_GCM_TLS_TAG_LEN; 2966 2967 case EVP_CTRL_COPY: 2968 { 2969 EVP_CIPHER_CTX *out = ptr; 2970 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out); 2971 if (gctx->gcm.key) { 2972 if (gctx->gcm.key != &gctx->ks) 2973 return 0; 2974 gctx_out->gcm.key = &gctx_out->ks; 2975 } 2976 if (gctx->iv == c->iv) 2977 gctx_out->iv = out->iv; 2978 else { 2979 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) { 2980 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 2981 return 0; 2982 } 2983 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen); 2984 } 2985 return 1; 2986 } 2987 2988 default: 2989 return -1; 2990 2991 } 2992 } 2993 2994 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2995 const unsigned char *iv, int enc) 2996 { 2997 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 2998 if (!iv && !key) 2999 return 1; 3000 if (key) { 3001 do { 3002 #ifdef HWAES_CAPABLE 3003 if (HWAES_CAPABLE) { 3004 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3005 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3006 (block128_f) HWAES_encrypt); 3007 # ifdef HWAES_ctr32_encrypt_blocks 3008 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; 3009 # else 3010 gctx->ctr = NULL; 3011 # endif 3012 break; 3013 } else 3014 #endif 3015 #ifdef BSAES_CAPABLE 3016 if (BSAES_CAPABLE) { 3017 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3018 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3019 (block128_f) AES_encrypt); 3020 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks; 3021 break; 3022 } else 3023 #endif 3024 #ifdef VPAES_CAPABLE 3025 if (VPAES_CAPABLE) { 3026 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3027 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3028 (block128_f) vpaes_encrypt); 3029 gctx->ctr = NULL; 3030 break; 3031 } else 3032 #endif 3033 (void)0; /* terminate potentially open 'else' */ 3034 3035 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3036 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3037 (block128_f) AES_encrypt); 3038 #ifdef AES_CTR_ASM 3039 gctx->ctr = (ctr128_f) AES_ctr32_encrypt; 3040 #else 3041 gctx->ctr = NULL; 3042 #endif 3043 } while (0); 3044 3045 /* 3046 * If we have an iv can set it directly, otherwise use saved IV. 3047 */ 3048 if (iv == NULL && gctx->iv_set) 3049 iv = gctx->iv; 3050 if (iv) { 3051 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 3052 gctx->iv_set = 1; 3053 } 3054 gctx->key_set = 1; 3055 } else { 3056 /* If key set use IV, otherwise copy */ 3057 if (gctx->key_set) 3058 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 3059 else 3060 memcpy(gctx->iv, iv, gctx->ivlen); 3061 gctx->iv_set = 1; 3062 gctx->iv_gen = 0; 3063 } 3064 return 1; 3065 } 3066 3067 /* 3068 * Handle TLS GCM packet format. This consists of the last portion of the IV 3069 * followed by the payload and finally the tag. On encrypt generate IV, 3070 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload 3071 * and verify tag. 3072 */ 3073 3074 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3075 const unsigned char *in, size_t len) 3076 { 3077 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3078 int rv = -1; 3079 /* Encrypt/decrypt must be performed in place */ 3080 if (out != in 3081 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN)) 3082 return -1; 3083 /* 3084 * Set IV from start of buffer or generate IV and write to start of 3085 * buffer. 3086 */ 3087 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN 3088 : EVP_CTRL_GCM_SET_IV_INV, 3089 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0) 3090 goto err; 3091 /* Use saved AAD */ 3092 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len)) 3093 goto err; 3094 /* Fix buffer and length to point to payload */ 3095 in += EVP_GCM_TLS_EXPLICIT_IV_LEN; 3096 out += EVP_GCM_TLS_EXPLICIT_IV_LEN; 3097 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 3098 if (ctx->encrypt) { 3099 /* Encrypt payload */ 3100 if (gctx->ctr) { 3101 size_t bulk = 0; 3102 #if defined(AES_GCM_ASM) 3103 if (len >= 32 && AES_GCM_ASM(gctx)) { 3104 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) 3105 return -1; 3106 3107 bulk = AES_gcm_encrypt(in, out, len, 3108 gctx->gcm.key, 3109 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3110 gctx->gcm.len.u[1] += bulk; 3111 } 3112 #endif 3113 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, 3114 in + bulk, 3115 out + bulk, 3116 len - bulk, gctx->ctr)) 3117 goto err; 3118 } else { 3119 size_t bulk = 0; 3120 #if defined(AES_GCM_ASM2) 3121 if (len >= 32 && AES_GCM_ASM2(gctx)) { 3122 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) 3123 return -1; 3124 3125 bulk = AES_gcm_encrypt(in, out, len, 3126 gctx->gcm.key, 3127 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3128 gctx->gcm.len.u[1] += bulk; 3129 } 3130 #endif 3131 if (CRYPTO_gcm128_encrypt(&gctx->gcm, 3132 in + bulk, out + bulk, len - bulk)) 3133 goto err; 3134 } 3135 out += len; 3136 /* Finally write tag */ 3137 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN); 3138 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 3139 } else { 3140 /* Decrypt */ 3141 if (gctx->ctr) { 3142 size_t bulk = 0; 3143 #if defined(AES_GCM_ASM) 3144 if (len >= 16 && AES_GCM_ASM(gctx)) { 3145 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) 3146 return -1; 3147 3148 bulk = AES_gcm_decrypt(in, out, len, 3149 gctx->gcm.key, 3150 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3151 gctx->gcm.len.u[1] += bulk; 3152 } 3153 #endif 3154 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, 3155 in + bulk, 3156 out + bulk, 3157 len - bulk, gctx->ctr)) 3158 goto err; 3159 } else { 3160 size_t bulk = 0; 3161 #if defined(AES_GCM_ASM2) 3162 if (len >= 16 && AES_GCM_ASM2(gctx)) { 3163 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) 3164 return -1; 3165 3166 bulk = AES_gcm_decrypt(in, out, len, 3167 gctx->gcm.key, 3168 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3169 gctx->gcm.len.u[1] += bulk; 3170 } 3171 #endif 3172 if (CRYPTO_gcm128_decrypt(&gctx->gcm, 3173 in + bulk, out + bulk, len - bulk)) 3174 goto err; 3175 } 3176 /* Retrieve tag */ 3177 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN); 3178 /* If tag mismatch wipe buffer */ 3179 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) { 3180 OPENSSL_cleanse(out, len); 3181 goto err; 3182 } 3183 rv = len; 3184 } 3185 3186 err: 3187 gctx->iv_set = 0; 3188 gctx->tls_aad_len = -1; 3189 return rv; 3190 } 3191 3192 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3193 const unsigned char *in, size_t len) 3194 { 3195 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3196 /* If not set up, return error */ 3197 if (!gctx->key_set) 3198 return -1; 3199 3200 if (gctx->tls_aad_len >= 0) 3201 return aes_gcm_tls_cipher(ctx, out, in, len); 3202 3203 if (!gctx->iv_set) 3204 return -1; 3205 if (in) { 3206 if (out == NULL) { 3207 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len)) 3208 return -1; 3209 } else if (ctx->encrypt) { 3210 if (gctx->ctr) { 3211 size_t bulk = 0; 3212 #if defined(AES_GCM_ASM) 3213 if (len >= 32 && AES_GCM_ASM(gctx)) { 3214 size_t res = (16 - gctx->gcm.mres) % 16; 3215 3216 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) 3217 return -1; 3218 3219 bulk = AES_gcm_encrypt(in + res, 3220 out + res, len - res, 3221 gctx->gcm.key, gctx->gcm.Yi.c, 3222 gctx->gcm.Xi.u); 3223 gctx->gcm.len.u[1] += bulk; 3224 bulk += res; 3225 } 3226 #endif 3227 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, 3228 in + bulk, 3229 out + bulk, 3230 len - bulk, gctx->ctr)) 3231 return -1; 3232 } else { 3233 size_t bulk = 0; 3234 #if defined(AES_GCM_ASM2) 3235 if (len >= 32 && AES_GCM_ASM2(gctx)) { 3236 size_t res = (16 - gctx->gcm.mres) % 16; 3237 3238 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) 3239 return -1; 3240 3241 bulk = AES_gcm_encrypt(in + res, 3242 out + res, len - res, 3243 gctx->gcm.key, gctx->gcm.Yi.c, 3244 gctx->gcm.Xi.u); 3245 gctx->gcm.len.u[1] += bulk; 3246 bulk += res; 3247 } 3248 #endif 3249 if (CRYPTO_gcm128_encrypt(&gctx->gcm, 3250 in + bulk, out + bulk, len - bulk)) 3251 return -1; 3252 } 3253 } else { 3254 if (gctx->ctr) { 3255 size_t bulk = 0; 3256 #if defined(AES_GCM_ASM) 3257 if (len >= 16 && AES_GCM_ASM(gctx)) { 3258 size_t res = (16 - gctx->gcm.mres) % 16; 3259 3260 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) 3261 return -1; 3262 3263 bulk = AES_gcm_decrypt(in + res, 3264 out + res, len - res, 3265 gctx->gcm.key, 3266 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3267 gctx->gcm.len.u[1] += bulk; 3268 bulk += res; 3269 } 3270 #endif 3271 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, 3272 in + bulk, 3273 out + bulk, 3274 len - bulk, gctx->ctr)) 3275 return -1; 3276 } else { 3277 size_t bulk = 0; 3278 #if defined(AES_GCM_ASM2) 3279 if (len >= 16 && AES_GCM_ASM2(gctx)) { 3280 size_t res = (16 - gctx->gcm.mres) % 16; 3281 3282 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) 3283 return -1; 3284 3285 bulk = AES_gcm_decrypt(in + res, 3286 out + res, len - res, 3287 gctx->gcm.key, 3288 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3289 gctx->gcm.len.u[1] += bulk; 3290 bulk += res; 3291 } 3292 #endif 3293 if (CRYPTO_gcm128_decrypt(&gctx->gcm, 3294 in + bulk, out + bulk, len - bulk)) 3295 return -1; 3296 } 3297 } 3298 return len; 3299 } else { 3300 if (!ctx->encrypt) { 3301 if (gctx->taglen < 0) 3302 return -1; 3303 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0) 3304 return -1; 3305 gctx->iv_set = 0; 3306 return 0; 3307 } 3308 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16); 3309 gctx->taglen = 16; 3310 /* Don't reuse the IV */ 3311 gctx->iv_set = 0; 3312 return 0; 3313 } 3314 3315 } 3316 3317 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \ 3318 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \ 3319 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \ 3320 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH) 3321 3322 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, 3323 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3324 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM, 3325 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3326 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM, 3327 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3328 3329 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3330 { 3331 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c); 3332 3333 if (type == EVP_CTRL_COPY) { 3334 EVP_CIPHER_CTX *out = ptr; 3335 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out); 3336 3337 if (xctx->xts.key1) { 3338 if (xctx->xts.key1 != &xctx->ks1) 3339 return 0; 3340 xctx_out->xts.key1 = &xctx_out->ks1; 3341 } 3342 if (xctx->xts.key2) { 3343 if (xctx->xts.key2 != &xctx->ks2) 3344 return 0; 3345 xctx_out->xts.key2 = &xctx_out->ks2; 3346 } 3347 return 1; 3348 } else if (type != EVP_CTRL_INIT) 3349 return -1; 3350 /* key1 and key2 are used as an indicator both key and IV are set */ 3351 xctx->xts.key1 = NULL; 3352 xctx->xts.key2 = NULL; 3353 return 1; 3354 } 3355 3356 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3357 const unsigned char *iv, int enc) 3358 { 3359 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 3360 3361 if (!iv && !key) 3362 return 1; 3363 3364 if (key) 3365 do { 3366 /* The key is two half length keys in reality */ 3367 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2; 3368 3369 /* 3370 * Verify that the two keys are different. 3371 * 3372 * This addresses the vulnerability described in Rogaway's 3373 * September 2004 paper: 3374 * 3375 * "Efficient Instantiations of Tweakable Blockciphers and 3376 * Refinements to Modes OCB and PMAC". 3377 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf) 3378 * 3379 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states 3380 * that: 3381 * "The check for Key_1 != Key_2 shall be done at any place 3382 * BEFORE using the keys in the XTS-AES algorithm to process 3383 * data with them." 3384 */ 3385 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) { 3386 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS); 3387 return 0; 3388 } 3389 3390 #ifdef AES_XTS_ASM 3391 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt; 3392 #else 3393 xctx->stream = NULL; 3394 #endif 3395 /* key_len is two AES keys */ 3396 #ifdef HWAES_CAPABLE 3397 if (HWAES_CAPABLE) { 3398 if (enc) { 3399 HWAES_set_encrypt_key(key, 3400 EVP_CIPHER_CTX_key_length(ctx) * 4, 3401 &xctx->ks1.ks); 3402 xctx->xts.block1 = (block128_f) HWAES_encrypt; 3403 # ifdef HWAES_xts_encrypt 3404 xctx->stream = HWAES_xts_encrypt; 3405 # endif 3406 } else { 3407 HWAES_set_decrypt_key(key, 3408 EVP_CIPHER_CTX_key_length(ctx) * 4, 3409 &xctx->ks1.ks); 3410 xctx->xts.block1 = (block128_f) HWAES_decrypt; 3411 # ifdef HWAES_xts_decrypt 3412 xctx->stream = HWAES_xts_decrypt; 3413 #endif 3414 } 3415 3416 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3417 EVP_CIPHER_CTX_key_length(ctx) * 4, 3418 &xctx->ks2.ks); 3419 xctx->xts.block2 = (block128_f) HWAES_encrypt; 3420 3421 xctx->xts.key1 = &xctx->ks1; 3422 break; 3423 } else 3424 #endif 3425 #ifdef BSAES_CAPABLE 3426 if (BSAES_CAPABLE) 3427 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt; 3428 else 3429 #endif 3430 #ifdef VPAES_CAPABLE 3431 if (VPAES_CAPABLE) { 3432 if (enc) { 3433 vpaes_set_encrypt_key(key, 3434 EVP_CIPHER_CTX_key_length(ctx) * 4, 3435 &xctx->ks1.ks); 3436 xctx->xts.block1 = (block128_f) vpaes_encrypt; 3437 } else { 3438 vpaes_set_decrypt_key(key, 3439 EVP_CIPHER_CTX_key_length(ctx) * 4, 3440 &xctx->ks1.ks); 3441 xctx->xts.block1 = (block128_f) vpaes_decrypt; 3442 } 3443 3444 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3445 EVP_CIPHER_CTX_key_length(ctx) * 4, 3446 &xctx->ks2.ks); 3447 xctx->xts.block2 = (block128_f) vpaes_encrypt; 3448 3449 xctx->xts.key1 = &xctx->ks1; 3450 break; 3451 } else 3452 #endif 3453 (void)0; /* terminate potentially open 'else' */ 3454 3455 if (enc) { 3456 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 3457 &xctx->ks1.ks); 3458 xctx->xts.block1 = (block128_f) AES_encrypt; 3459 } else { 3460 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 3461 &xctx->ks1.ks); 3462 xctx->xts.block1 = (block128_f) AES_decrypt; 3463 } 3464 3465 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3466 EVP_CIPHER_CTX_key_length(ctx) * 4, 3467 &xctx->ks2.ks); 3468 xctx->xts.block2 = (block128_f) AES_encrypt; 3469 3470 xctx->xts.key1 = &xctx->ks1; 3471 } while (0); 3472 3473 if (iv) { 3474 xctx->xts.key2 = &xctx->ks2; 3475 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 3476 } 3477 3478 return 1; 3479 } 3480 3481 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3482 const unsigned char *in, size_t len) 3483 { 3484 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 3485 if (!xctx->xts.key1 || !xctx->xts.key2) 3486 return 0; 3487 if (!out || !in || len < AES_BLOCK_SIZE) 3488 return 0; 3489 if (xctx->stream) 3490 (*xctx->stream) (in, out, len, 3491 xctx->xts.key1, xctx->xts.key2, 3492 EVP_CIPHER_CTX_iv_noconst(ctx)); 3493 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx), 3494 in, out, len, 3495 EVP_CIPHER_CTX_encrypting(ctx))) 3496 return 0; 3497 return 1; 3498 } 3499 3500 #define aes_xts_cleanup NULL 3501 3502 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \ 3503 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \ 3504 | EVP_CIPH_CUSTOM_COPY) 3505 3506 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS) 3507 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS) 3508 3509 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3510 { 3511 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c); 3512 switch (type) { 3513 case EVP_CTRL_INIT: 3514 cctx->key_set = 0; 3515 cctx->iv_set = 0; 3516 cctx->L = 8; 3517 cctx->M = 12; 3518 cctx->tag_set = 0; 3519 cctx->len_set = 0; 3520 cctx->tls_aad_len = -1; 3521 return 1; 3522 case EVP_CTRL_GET_IVLEN: 3523 *(int *)ptr = 15 - cctx->L; 3524 return 1; 3525 case EVP_CTRL_AEAD_TLS1_AAD: 3526 /* Save the AAD for later use */ 3527 if (arg != EVP_AEAD_TLS1_AAD_LEN) 3528 return 0; 3529 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg); 3530 cctx->tls_aad_len = arg; 3531 { 3532 uint16_t len = 3533 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8 3534 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1]; 3535 /* Correct length for explicit IV */ 3536 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN) 3537 return 0; 3538 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN; 3539 /* If decrypting correct for tag too */ 3540 if (!EVP_CIPHER_CTX_encrypting(c)) { 3541 if (len < cctx->M) 3542 return 0; 3543 len -= cctx->M; 3544 } 3545 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8; 3546 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff; 3547 } 3548 /* Extra padding: tag appended to record */ 3549 return cctx->M; 3550 3551 case EVP_CTRL_CCM_SET_IV_FIXED: 3552 /* Sanity check length */ 3553 if (arg != EVP_CCM_TLS_FIXED_IV_LEN) 3554 return 0; 3555 /* Just copy to first part of IV */ 3556 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg); 3557 return 1; 3558 3559 case EVP_CTRL_AEAD_SET_IVLEN: 3560 arg = 15 - arg; 3561 /* fall thru */ 3562 case EVP_CTRL_CCM_SET_L: 3563 if (arg < 2 || arg > 8) 3564 return 0; 3565 cctx->L = arg; 3566 return 1; 3567 3568 case EVP_CTRL_AEAD_SET_TAG: 3569 if ((arg & 1) || arg < 4 || arg > 16) 3570 return 0; 3571 if (EVP_CIPHER_CTX_encrypting(c) && ptr) 3572 return 0; 3573 if (ptr) { 3574 cctx->tag_set = 1; 3575 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg); 3576 } 3577 cctx->M = arg; 3578 return 1; 3579 3580 case EVP_CTRL_AEAD_GET_TAG: 3581 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set) 3582 return 0; 3583 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg)) 3584 return 0; 3585 cctx->tag_set = 0; 3586 cctx->iv_set = 0; 3587 cctx->len_set = 0; 3588 return 1; 3589 3590 case EVP_CTRL_COPY: 3591 { 3592 EVP_CIPHER_CTX *out = ptr; 3593 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out); 3594 if (cctx->ccm.key) { 3595 if (cctx->ccm.key != &cctx->ks) 3596 return 0; 3597 cctx_out->ccm.key = &cctx_out->ks; 3598 } 3599 return 1; 3600 } 3601 3602 default: 3603 return -1; 3604 3605 } 3606 } 3607 3608 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3609 const unsigned char *iv, int enc) 3610 { 3611 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3612 if (!iv && !key) 3613 return 1; 3614 if (key) 3615 do { 3616 #ifdef HWAES_CAPABLE 3617 if (HWAES_CAPABLE) { 3618 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3619 &cctx->ks.ks); 3620 3621 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3622 &cctx->ks, (block128_f) HWAES_encrypt); 3623 cctx->str = NULL; 3624 cctx->key_set = 1; 3625 break; 3626 } else 3627 #endif 3628 #ifdef VPAES_CAPABLE 3629 if (VPAES_CAPABLE) { 3630 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3631 &cctx->ks.ks); 3632 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3633 &cctx->ks, (block128_f) vpaes_encrypt); 3634 cctx->str = NULL; 3635 cctx->key_set = 1; 3636 break; 3637 } 3638 #endif 3639 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3640 &cctx->ks.ks); 3641 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3642 &cctx->ks, (block128_f) AES_encrypt); 3643 cctx->str = NULL; 3644 cctx->key_set = 1; 3645 } while (0); 3646 if (iv) { 3647 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 3648 cctx->iv_set = 1; 3649 } 3650 return 1; 3651 } 3652 3653 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3654 const unsigned char *in, size_t len) 3655 { 3656 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3657 CCM128_CONTEXT *ccm = &cctx->ccm; 3658 /* Encrypt/decrypt must be performed in place */ 3659 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M)) 3660 return -1; 3661 /* If encrypting set explicit IV from sequence number (start of AAD) */ 3662 if (EVP_CIPHER_CTX_encrypting(ctx)) 3663 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx), 3664 EVP_CCM_TLS_EXPLICIT_IV_LEN); 3665 /* Get rest of IV from explicit IV */ 3666 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in, 3667 EVP_CCM_TLS_EXPLICIT_IV_LEN); 3668 /* Correct length value */ 3669 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M; 3670 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L, 3671 len)) 3672 return -1; 3673 /* Use saved AAD */ 3674 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len); 3675 /* Fix buffer to point to payload */ 3676 in += EVP_CCM_TLS_EXPLICIT_IV_LEN; 3677 out += EVP_CCM_TLS_EXPLICIT_IV_LEN; 3678 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3679 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, 3680 cctx->str) : 3681 CRYPTO_ccm128_encrypt(ccm, in, out, len)) 3682 return -1; 3683 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M)) 3684 return -1; 3685 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M; 3686 } else { 3687 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, 3688 cctx->str) : 3689 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) { 3690 unsigned char tag[16]; 3691 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) { 3692 if (!CRYPTO_memcmp(tag, in + len, cctx->M)) 3693 return len; 3694 } 3695 } 3696 OPENSSL_cleanse(out, len); 3697 return -1; 3698 } 3699 } 3700 3701 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3702 const unsigned char *in, size_t len) 3703 { 3704 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3705 CCM128_CONTEXT *ccm = &cctx->ccm; 3706 /* If not set up, return error */ 3707 if (!cctx->key_set) 3708 return -1; 3709 3710 if (cctx->tls_aad_len >= 0) 3711 return aes_ccm_tls_cipher(ctx, out, in, len); 3712 3713 /* EVP_*Final() doesn't return any data */ 3714 if (in == NULL && out != NULL) 3715 return 0; 3716 3717 if (!cctx->iv_set) 3718 return -1; 3719 3720 if (!out) { 3721 if (!in) { 3722 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 3723 15 - cctx->L, len)) 3724 return -1; 3725 cctx->len_set = 1; 3726 return len; 3727 } 3728 /* If have AAD need message length */ 3729 if (!cctx->len_set && len) 3730 return -1; 3731 CRYPTO_ccm128_aad(ccm, in, len); 3732 return len; 3733 } 3734 3735 /* The tag must be set before actually decrypting data */ 3736 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set) 3737 return -1; 3738 3739 /* If not set length yet do it */ 3740 if (!cctx->len_set) { 3741 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 3742 15 - cctx->L, len)) 3743 return -1; 3744 cctx->len_set = 1; 3745 } 3746 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3747 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, 3748 cctx->str) : 3749 CRYPTO_ccm128_encrypt(ccm, in, out, len)) 3750 return -1; 3751 cctx->tag_set = 1; 3752 return len; 3753 } else { 3754 int rv = -1; 3755 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, 3756 cctx->str) : 3757 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) { 3758 unsigned char tag[16]; 3759 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) { 3760 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx), 3761 cctx->M)) 3762 rv = len; 3763 } 3764 } 3765 if (rv == -1) 3766 OPENSSL_cleanse(out, len); 3767 cctx->iv_set = 0; 3768 cctx->tag_set = 0; 3769 cctx->len_set = 0; 3770 return rv; 3771 } 3772 } 3773 3774 #define aes_ccm_cleanup NULL 3775 3776 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM, 3777 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3778 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM, 3779 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3780 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM, 3781 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3782 3783 typedef struct { 3784 union { 3785 double align; 3786 AES_KEY ks; 3787 } ks; 3788 /* Indicates if IV has been set */ 3789 unsigned char *iv; 3790 } EVP_AES_WRAP_CTX; 3791 3792 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3793 const unsigned char *iv, int enc) 3794 { 3795 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx); 3796 if (!iv && !key) 3797 return 1; 3798 if (key) { 3799 if (EVP_CIPHER_CTX_encrypting(ctx)) 3800 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3801 &wctx->ks.ks); 3802 else 3803 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3804 &wctx->ks.ks); 3805 if (!iv) 3806 wctx->iv = NULL; 3807 } 3808 if (iv) { 3809 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx)); 3810 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx); 3811 } 3812 return 1; 3813 } 3814 3815 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3816 const unsigned char *in, size_t inlen) 3817 { 3818 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx); 3819 size_t rv; 3820 /* AES wrap with padding has IV length of 4, without padding 8 */ 3821 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4; 3822 /* No final operation so always return zero length */ 3823 if (!in) 3824 return 0; 3825 /* Input length must always be non-zero */ 3826 if (!inlen) 3827 return -1; 3828 /* If decrypting need at least 16 bytes and multiple of 8 */ 3829 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7)) 3830 return -1; 3831 /* If not padding input must be multiple of 8 */ 3832 if (!pad && inlen & 0x7) 3833 return -1; 3834 if (is_partially_overlapping(out, in, inlen)) { 3835 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING); 3836 return 0; 3837 } 3838 if (!out) { 3839 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3840 /* If padding round up to multiple of 8 */ 3841 if (pad) 3842 inlen = (inlen + 7) / 8 * 8; 3843 /* 8 byte prefix */ 3844 return inlen + 8; 3845 } else { 3846 /* 3847 * If not padding output will be exactly 8 bytes smaller than 3848 * input. If padding it will be at least 8 bytes smaller but we 3849 * don't know how much. 3850 */ 3851 return inlen - 8; 3852 } 3853 } 3854 if (pad) { 3855 if (EVP_CIPHER_CTX_encrypting(ctx)) 3856 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv, 3857 out, in, inlen, 3858 (block128_f) AES_encrypt); 3859 else 3860 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv, 3861 out, in, inlen, 3862 (block128_f) AES_decrypt); 3863 } else { 3864 if (EVP_CIPHER_CTX_encrypting(ctx)) 3865 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv, 3866 out, in, inlen, (block128_f) AES_encrypt); 3867 else 3868 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv, 3869 out, in, inlen, (block128_f) AES_decrypt); 3870 } 3871 return rv ? (int)rv : -1; 3872 } 3873 3874 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \ 3875 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \ 3876 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1) 3877 3878 static const EVP_CIPHER aes_128_wrap = { 3879 NID_id_aes128_wrap, 3880 8, 16, 8, WRAP_FLAGS, 3881 aes_wrap_init_key, aes_wrap_cipher, 3882 NULL, 3883 sizeof(EVP_AES_WRAP_CTX), 3884 NULL, NULL, NULL, NULL 3885 }; 3886 3887 const EVP_CIPHER *EVP_aes_128_wrap(void) 3888 { 3889 return &aes_128_wrap; 3890 } 3891 3892 static const EVP_CIPHER aes_192_wrap = { 3893 NID_id_aes192_wrap, 3894 8, 24, 8, WRAP_FLAGS, 3895 aes_wrap_init_key, aes_wrap_cipher, 3896 NULL, 3897 sizeof(EVP_AES_WRAP_CTX), 3898 NULL, NULL, NULL, NULL 3899 }; 3900 3901 const EVP_CIPHER *EVP_aes_192_wrap(void) 3902 { 3903 return &aes_192_wrap; 3904 } 3905 3906 static const EVP_CIPHER aes_256_wrap = { 3907 NID_id_aes256_wrap, 3908 8, 32, 8, WRAP_FLAGS, 3909 aes_wrap_init_key, aes_wrap_cipher, 3910 NULL, 3911 sizeof(EVP_AES_WRAP_CTX), 3912 NULL, NULL, NULL, NULL 3913 }; 3914 3915 const EVP_CIPHER *EVP_aes_256_wrap(void) 3916 { 3917 return &aes_256_wrap; 3918 } 3919 3920 static const EVP_CIPHER aes_128_wrap_pad = { 3921 NID_id_aes128_wrap_pad, 3922 8, 16, 4, WRAP_FLAGS, 3923 aes_wrap_init_key, aes_wrap_cipher, 3924 NULL, 3925 sizeof(EVP_AES_WRAP_CTX), 3926 NULL, NULL, NULL, NULL 3927 }; 3928 3929 const EVP_CIPHER *EVP_aes_128_wrap_pad(void) 3930 { 3931 return &aes_128_wrap_pad; 3932 } 3933 3934 static const EVP_CIPHER aes_192_wrap_pad = { 3935 NID_id_aes192_wrap_pad, 3936 8, 24, 4, WRAP_FLAGS, 3937 aes_wrap_init_key, aes_wrap_cipher, 3938 NULL, 3939 sizeof(EVP_AES_WRAP_CTX), 3940 NULL, NULL, NULL, NULL 3941 }; 3942 3943 const EVP_CIPHER *EVP_aes_192_wrap_pad(void) 3944 { 3945 return &aes_192_wrap_pad; 3946 } 3947 3948 static const EVP_CIPHER aes_256_wrap_pad = { 3949 NID_id_aes256_wrap_pad, 3950 8, 32, 4, WRAP_FLAGS, 3951 aes_wrap_init_key, aes_wrap_cipher, 3952 NULL, 3953 sizeof(EVP_AES_WRAP_CTX), 3954 NULL, NULL, NULL, NULL 3955 }; 3956 3957 const EVP_CIPHER *EVP_aes_256_wrap_pad(void) 3958 { 3959 return &aes_256_wrap_pad; 3960 } 3961 3962 #ifndef OPENSSL_NO_OCB 3963 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3964 { 3965 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c); 3966 EVP_CIPHER_CTX *newc; 3967 EVP_AES_OCB_CTX *new_octx; 3968 3969 switch (type) { 3970 case EVP_CTRL_INIT: 3971 octx->key_set = 0; 3972 octx->iv_set = 0; 3973 octx->ivlen = EVP_CIPHER_iv_length(c->cipher); 3974 octx->iv = EVP_CIPHER_CTX_iv_noconst(c); 3975 octx->taglen = 16; 3976 octx->data_buf_len = 0; 3977 octx->aad_buf_len = 0; 3978 return 1; 3979 3980 case EVP_CTRL_GET_IVLEN: 3981 *(int *)ptr = octx->ivlen; 3982 return 1; 3983 3984 case EVP_CTRL_AEAD_SET_IVLEN: 3985 /* IV len must be 1 to 15 */ 3986 if (arg <= 0 || arg > 15) 3987 return 0; 3988 3989 octx->ivlen = arg; 3990 return 1; 3991 3992 case EVP_CTRL_AEAD_SET_TAG: 3993 if (!ptr) { 3994 /* Tag len must be 0 to 16 */ 3995 if (arg < 0 || arg > 16) 3996 return 0; 3997 3998 octx->taglen = arg; 3999 return 1; 4000 } 4001 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c)) 4002 return 0; 4003 memcpy(octx->tag, ptr, arg); 4004 return 1; 4005 4006 case EVP_CTRL_AEAD_GET_TAG: 4007 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c)) 4008 return 0; 4009 4010 memcpy(ptr, octx->tag, arg); 4011 return 1; 4012 4013 case EVP_CTRL_COPY: 4014 newc = (EVP_CIPHER_CTX *)ptr; 4015 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc); 4016 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb, 4017 &new_octx->ksenc.ks, 4018 &new_octx->ksdec.ks); 4019 4020 default: 4021 return -1; 4022 4023 } 4024 } 4025 4026 # ifdef HWAES_CAPABLE 4027 # ifdef HWAES_ocb_encrypt 4028 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out, 4029 size_t blocks, const void *key, 4030 size_t start_block_num, 4031 unsigned char offset_i[16], 4032 const unsigned char L_[][16], 4033 unsigned char checksum[16]); 4034 # else 4035 # define HWAES_ocb_encrypt ((ocb128_f)NULL) 4036 # endif 4037 # ifdef HWAES_ocb_decrypt 4038 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out, 4039 size_t blocks, const void *key, 4040 size_t start_block_num, 4041 unsigned char offset_i[16], 4042 const unsigned char L_[][16], 4043 unsigned char checksum[16]); 4044 # else 4045 # define HWAES_ocb_decrypt ((ocb128_f)NULL) 4046 # endif 4047 # endif 4048 4049 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 4050 const unsigned char *iv, int enc) 4051 { 4052 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 4053 if (!iv && !key) 4054 return 1; 4055 if (key) { 4056 do { 4057 /* 4058 * We set both the encrypt and decrypt key here because decrypt 4059 * needs both. We could possibly optimise to remove setting the 4060 * decrypt for an encryption operation. 4061 */ 4062 # ifdef HWAES_CAPABLE 4063 if (HWAES_CAPABLE) { 4064 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4065 &octx->ksenc.ks); 4066 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4067 &octx->ksdec.ks); 4068 if (!CRYPTO_ocb128_init(&octx->ocb, 4069 &octx->ksenc.ks, &octx->ksdec.ks, 4070 (block128_f) HWAES_encrypt, 4071 (block128_f) HWAES_decrypt, 4072 enc ? HWAES_ocb_encrypt 4073 : HWAES_ocb_decrypt)) 4074 return 0; 4075 break; 4076 } 4077 # endif 4078 # ifdef VPAES_CAPABLE 4079 if (VPAES_CAPABLE) { 4080 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4081 &octx->ksenc.ks); 4082 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4083 &octx->ksdec.ks); 4084 if (!CRYPTO_ocb128_init(&octx->ocb, 4085 &octx->ksenc.ks, &octx->ksdec.ks, 4086 (block128_f) vpaes_encrypt, 4087 (block128_f) vpaes_decrypt, 4088 NULL)) 4089 return 0; 4090 break; 4091 } 4092 # endif 4093 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4094 &octx->ksenc.ks); 4095 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4096 &octx->ksdec.ks); 4097 if (!CRYPTO_ocb128_init(&octx->ocb, 4098 &octx->ksenc.ks, &octx->ksdec.ks, 4099 (block128_f) AES_encrypt, 4100 (block128_f) AES_decrypt, 4101 NULL)) 4102 return 0; 4103 } 4104 while (0); 4105 4106 /* 4107 * If we have an iv we can set it directly, otherwise use saved IV. 4108 */ 4109 if (iv == NULL && octx->iv_set) 4110 iv = octx->iv; 4111 if (iv) { 4112 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen) 4113 != 1) 4114 return 0; 4115 octx->iv_set = 1; 4116 } 4117 octx->key_set = 1; 4118 } else { 4119 /* If key set use IV, otherwise copy */ 4120 if (octx->key_set) 4121 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen); 4122 else 4123 memcpy(octx->iv, iv, octx->ivlen); 4124 octx->iv_set = 1; 4125 } 4126 return 1; 4127 } 4128 4129 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 4130 const unsigned char *in, size_t len) 4131 { 4132 unsigned char *buf; 4133 int *buf_len; 4134 int written_len = 0; 4135 size_t trailing_len; 4136 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 4137 4138 /* If IV or Key not set then return error */ 4139 if (!octx->iv_set) 4140 return -1; 4141 4142 if (!octx->key_set) 4143 return -1; 4144 4145 if (in != NULL) { 4146 /* 4147 * Need to ensure we are only passing full blocks to low level OCB 4148 * routines. We do it here rather than in EVP_EncryptUpdate/ 4149 * EVP_DecryptUpdate because we need to pass full blocks of AAD too 4150 * and those routines don't support that 4151 */ 4152 4153 /* Are we dealing with AAD or normal data here? */ 4154 if (out == NULL) { 4155 buf = octx->aad_buf; 4156 buf_len = &(octx->aad_buf_len); 4157 } else { 4158 buf = octx->data_buf; 4159 buf_len = &(octx->data_buf_len); 4160 4161 if (is_partially_overlapping(out + *buf_len, in, len)) { 4162 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING); 4163 return 0; 4164 } 4165 } 4166 4167 /* 4168 * If we've got a partially filled buffer from a previous call then 4169 * use that data first 4170 */ 4171 if (*buf_len > 0) { 4172 unsigned int remaining; 4173 4174 remaining = AES_BLOCK_SIZE - (*buf_len); 4175 if (remaining > len) { 4176 memcpy(buf + (*buf_len), in, len); 4177 *(buf_len) += len; 4178 return 0; 4179 } 4180 memcpy(buf + (*buf_len), in, remaining); 4181 4182 /* 4183 * If we get here we've filled the buffer, so process it 4184 */ 4185 len -= remaining; 4186 in += remaining; 4187 if (out == NULL) { 4188 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE)) 4189 return -1; 4190 } else if (EVP_CIPHER_CTX_encrypting(ctx)) { 4191 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out, 4192 AES_BLOCK_SIZE)) 4193 return -1; 4194 } else { 4195 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out, 4196 AES_BLOCK_SIZE)) 4197 return -1; 4198 } 4199 written_len = AES_BLOCK_SIZE; 4200 *buf_len = 0; 4201 if (out != NULL) 4202 out += AES_BLOCK_SIZE; 4203 } 4204 4205 /* Do we have a partial block to handle at the end? */ 4206 trailing_len = len % AES_BLOCK_SIZE; 4207 4208 /* 4209 * If we've got some full blocks to handle, then process these first 4210 */ 4211 if (len != trailing_len) { 4212 if (out == NULL) { 4213 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len)) 4214 return -1; 4215 } else if (EVP_CIPHER_CTX_encrypting(ctx)) { 4216 if (!CRYPTO_ocb128_encrypt 4217 (&octx->ocb, in, out, len - trailing_len)) 4218 return -1; 4219 } else { 4220 if (!CRYPTO_ocb128_decrypt 4221 (&octx->ocb, in, out, len - trailing_len)) 4222 return -1; 4223 } 4224 written_len += len - trailing_len; 4225 in += len - trailing_len; 4226 } 4227 4228 /* Handle any trailing partial block */ 4229 if (trailing_len > 0) { 4230 memcpy(buf, in, trailing_len); 4231 *buf_len = trailing_len; 4232 } 4233 4234 return written_len; 4235 } else { 4236 /* 4237 * First of all empty the buffer of any partial block that we might 4238 * have been provided - both for data and AAD 4239 */ 4240 if (octx->data_buf_len > 0) { 4241 if (EVP_CIPHER_CTX_encrypting(ctx)) { 4242 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out, 4243 octx->data_buf_len)) 4244 return -1; 4245 } else { 4246 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out, 4247 octx->data_buf_len)) 4248 return -1; 4249 } 4250 written_len = octx->data_buf_len; 4251 octx->data_buf_len = 0; 4252 } 4253 if (octx->aad_buf_len > 0) { 4254 if (!CRYPTO_ocb128_aad 4255 (&octx->ocb, octx->aad_buf, octx->aad_buf_len)) 4256 return -1; 4257 octx->aad_buf_len = 0; 4258 } 4259 /* If decrypting then verify */ 4260 if (!EVP_CIPHER_CTX_encrypting(ctx)) { 4261 if (octx->taglen < 0) 4262 return -1; 4263 if (CRYPTO_ocb128_finish(&octx->ocb, 4264 octx->tag, octx->taglen) != 0) 4265 return -1; 4266 octx->iv_set = 0; 4267 return written_len; 4268 } 4269 /* If encrypting then just get the tag */ 4270 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1) 4271 return -1; 4272 /* Don't reuse the IV */ 4273 octx->iv_set = 0; 4274 return written_len; 4275 } 4276 } 4277 4278 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c) 4279 { 4280 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c); 4281 CRYPTO_ocb128_cleanup(&octx->ocb); 4282 return 1; 4283 } 4284 4285 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB, 4286 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 4287 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB, 4288 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 4289 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB, 4290 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 4291 #endif /* OPENSSL_NO_OCB */ 4292