1 /* 2 * Copyright 2001-2020 The OpenSSL Project Authors. All Rights Reserved. 3 * 4 * Licensed under the OpenSSL license (the "License"). You may not use 5 * this file except in compliance with the License. You can obtain a copy 6 * in the file LICENSE in the source distribution or at 7 * https://www.openssl.org/source/license.html 8 */ 9 10 #include <openssl/opensslconf.h> 11 #include <openssl/crypto.h> 12 #include <openssl/evp.h> 13 #include <openssl/err.h> 14 #include <string.h> 15 #include <assert.h> 16 #include <openssl/aes.h> 17 #include "crypto/evp.h" 18 #include "modes_local.h" 19 #include <openssl/rand.h> 20 #include "evp_local.h" 21 22 typedef struct { 23 union { 24 double align; 25 AES_KEY ks; 26 } ks; 27 block128_f block; 28 union { 29 cbc128_f cbc; 30 ctr128_f ctr; 31 } stream; 32 } EVP_AES_KEY; 33 34 typedef struct { 35 union { 36 double align; 37 AES_KEY ks; 38 } ks; /* AES key schedule to use */ 39 int key_set; /* Set if key initialised */ 40 int iv_set; /* Set if an iv is set */ 41 GCM128_CONTEXT gcm; 42 unsigned char *iv; /* Temporary IV store */ 43 int ivlen; /* IV length */ 44 int taglen; 45 int iv_gen; /* It is OK to generate IVs */ 46 int tls_aad_len; /* TLS AAD length */ 47 ctr128_f ctr; 48 } EVP_AES_GCM_CTX; 49 50 typedef struct { 51 union { 52 double align; 53 AES_KEY ks; 54 } ks1, ks2; /* AES key schedules to use */ 55 XTS128_CONTEXT xts; 56 void (*stream) (const unsigned char *in, 57 unsigned char *out, size_t length, 58 const AES_KEY *key1, const AES_KEY *key2, 59 const unsigned char iv[16]); 60 } EVP_AES_XTS_CTX; 61 62 typedef struct { 63 union { 64 double align; 65 AES_KEY ks; 66 } ks; /* AES key schedule to use */ 67 int key_set; /* Set if key initialised */ 68 int iv_set; /* Set if an iv is set */ 69 int tag_set; /* Set if tag is valid */ 70 int len_set; /* Set if message length set */ 71 int L, M; /* L and M parameters from RFC3610 */ 72 int tls_aad_len; /* TLS AAD length */ 73 CCM128_CONTEXT ccm; 74 ccm128_f str; 75 } EVP_AES_CCM_CTX; 76 77 #ifndef OPENSSL_NO_OCB 78 typedef struct { 79 union { 80 double align; 81 AES_KEY ks; 82 } ksenc; /* AES key schedule to use for encryption */ 83 union { 84 double align; 85 AES_KEY ks; 86 } ksdec; /* AES key schedule to use for decryption */ 87 int key_set; /* Set if key initialised */ 88 int iv_set; /* Set if an iv is set */ 89 OCB128_CONTEXT ocb; 90 unsigned char *iv; /* Temporary IV store */ 91 unsigned char tag[16]; 92 unsigned char data_buf[16]; /* Store partial data blocks */ 93 unsigned char aad_buf[16]; /* Store partial AAD blocks */ 94 int data_buf_len; 95 int aad_buf_len; 96 int ivlen; /* IV length */ 97 int taglen; 98 } EVP_AES_OCB_CTX; 99 #endif 100 101 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4)) 102 103 #ifdef VPAES_ASM 104 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits, 105 AES_KEY *key); 106 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits, 107 AES_KEY *key); 108 109 void vpaes_encrypt(const unsigned char *in, unsigned char *out, 110 const AES_KEY *key); 111 void vpaes_decrypt(const unsigned char *in, unsigned char *out, 112 const AES_KEY *key); 113 114 void vpaes_cbc_encrypt(const unsigned char *in, 115 unsigned char *out, 116 size_t length, 117 const AES_KEY *key, unsigned char *ivec, int enc); 118 #endif 119 #ifdef BSAES_ASM 120 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out, 121 size_t length, const AES_KEY *key, 122 unsigned char ivec[16], int enc); 123 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out, 124 size_t len, const AES_KEY *key, 125 const unsigned char ivec[16]); 126 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out, 127 size_t len, const AES_KEY *key1, 128 const AES_KEY *key2, const unsigned char iv[16]); 129 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out, 130 size_t len, const AES_KEY *key1, 131 const AES_KEY *key2, const unsigned char iv[16]); 132 #endif 133 #ifdef AES_CTR_ASM 134 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out, 135 size_t blocks, const AES_KEY *key, 136 const unsigned char ivec[AES_BLOCK_SIZE]); 137 #endif 138 #ifdef AES_XTS_ASM 139 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len, 140 const AES_KEY *key1, const AES_KEY *key2, 141 const unsigned char iv[16]); 142 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len, 143 const AES_KEY *key1, const AES_KEY *key2, 144 const unsigned char iv[16]); 145 #endif 146 147 /* increment counter (64-bit int) by 1 */ 148 static void ctr64_inc(unsigned char *counter) 149 { 150 int n = 8; 151 unsigned char c; 152 153 do { 154 --n; 155 c = counter[n]; 156 ++c; 157 counter[n] = c; 158 if (c) 159 return; 160 } while (n); 161 } 162 163 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC)) 164 # include "ppc_arch.h" 165 # ifdef VPAES_ASM 166 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC) 167 # endif 168 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207) 169 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key 170 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key 171 # define HWAES_encrypt aes_p8_encrypt 172 # define HWAES_decrypt aes_p8_decrypt 173 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt 174 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks 175 # define HWAES_xts_encrypt aes_p8_xts_encrypt 176 # define HWAES_xts_decrypt aes_p8_xts_decrypt 177 #endif 178 179 #if defined(OPENSSL_CPUID_OBJ) && ( \ 180 ((defined(__i386) || defined(__i386__) || \ 181 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \ 182 defined(__x86_64) || defined(__x86_64__) || \ 183 defined(_M_AMD64) || defined(_M_X64) ) 184 185 extern unsigned int OPENSSL_ia32cap_P[]; 186 187 # ifdef VPAES_ASM 188 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32))) 189 # endif 190 # ifdef BSAES_ASM 191 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32))) 192 # endif 193 /* 194 * AES-NI section 195 */ 196 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32))) 197 198 int aesni_set_encrypt_key(const unsigned char *userKey, int bits, 199 AES_KEY *key); 200 int aesni_set_decrypt_key(const unsigned char *userKey, int bits, 201 AES_KEY *key); 202 203 void aesni_encrypt(const unsigned char *in, unsigned char *out, 204 const AES_KEY *key); 205 void aesni_decrypt(const unsigned char *in, unsigned char *out, 206 const AES_KEY *key); 207 208 void aesni_ecb_encrypt(const unsigned char *in, 209 unsigned char *out, 210 size_t length, const AES_KEY *key, int enc); 211 void aesni_cbc_encrypt(const unsigned char *in, 212 unsigned char *out, 213 size_t length, 214 const AES_KEY *key, unsigned char *ivec, int enc); 215 216 void aesni_ctr32_encrypt_blocks(const unsigned char *in, 217 unsigned char *out, 218 size_t blocks, 219 const void *key, const unsigned char *ivec); 220 221 void aesni_xts_encrypt(const unsigned char *in, 222 unsigned char *out, 223 size_t length, 224 const AES_KEY *key1, const AES_KEY *key2, 225 const unsigned char iv[16]); 226 227 void aesni_xts_decrypt(const unsigned char *in, 228 unsigned char *out, 229 size_t length, 230 const AES_KEY *key1, const AES_KEY *key2, 231 const unsigned char iv[16]); 232 233 void aesni_ccm64_encrypt_blocks(const unsigned char *in, 234 unsigned char *out, 235 size_t blocks, 236 const void *key, 237 const unsigned char ivec[16], 238 unsigned char cmac[16]); 239 240 void aesni_ccm64_decrypt_blocks(const unsigned char *in, 241 unsigned char *out, 242 size_t blocks, 243 const void *key, 244 const unsigned char ivec[16], 245 unsigned char cmac[16]); 246 247 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64) 248 size_t aesni_gcm_encrypt(const unsigned char *in, 249 unsigned char *out, 250 size_t len, 251 const void *key, unsigned char ivec[16], u64 *Xi); 252 # define AES_gcm_encrypt aesni_gcm_encrypt 253 size_t aesni_gcm_decrypt(const unsigned char *in, 254 unsigned char *out, 255 size_t len, 256 const void *key, unsigned char ivec[16], u64 *Xi); 257 # define AES_gcm_decrypt aesni_gcm_decrypt 258 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in, 259 size_t len); 260 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \ 261 gctx->gcm.ghash==gcm_ghash_avx) 262 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \ 263 gctx->gcm.ghash==gcm_ghash_avx) 264 # undef AES_GCM_ASM2 /* minor size optimization */ 265 # endif 266 267 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 268 const unsigned char *iv, int enc) 269 { 270 int ret, mode; 271 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 272 273 mode = EVP_CIPHER_CTX_mode(ctx); 274 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 275 && !enc) { 276 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 277 &dat->ks.ks); 278 dat->block = (block128_f) aesni_decrypt; 279 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 280 (cbc128_f) aesni_cbc_encrypt : NULL; 281 } else { 282 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 283 &dat->ks.ks); 284 dat->block = (block128_f) aesni_encrypt; 285 if (mode == EVP_CIPH_CBC_MODE) 286 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt; 287 else if (mode == EVP_CIPH_CTR_MODE) 288 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks; 289 else 290 dat->stream.cbc = NULL; 291 } 292 293 if (ret < 0) { 294 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 295 return 0; 296 } 297 298 return 1; 299 } 300 301 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 302 const unsigned char *in, size_t len) 303 { 304 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks, 305 EVP_CIPHER_CTX_iv_noconst(ctx), 306 EVP_CIPHER_CTX_encrypting(ctx)); 307 308 return 1; 309 } 310 311 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 312 const unsigned char *in, size_t len) 313 { 314 size_t bl = EVP_CIPHER_CTX_block_size(ctx); 315 316 if (len < bl) 317 return 1; 318 319 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks, 320 EVP_CIPHER_CTX_encrypting(ctx)); 321 322 return 1; 323 } 324 325 # define aesni_ofb_cipher aes_ofb_cipher 326 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 327 const unsigned char *in, size_t len); 328 329 # define aesni_cfb_cipher aes_cfb_cipher 330 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 331 const unsigned char *in, size_t len); 332 333 # define aesni_cfb8_cipher aes_cfb8_cipher 334 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 335 const unsigned char *in, size_t len); 336 337 # define aesni_cfb1_cipher aes_cfb1_cipher 338 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 339 const unsigned char *in, size_t len); 340 341 # define aesni_ctr_cipher aes_ctr_cipher 342 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 343 const unsigned char *in, size_t len); 344 345 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 346 const unsigned char *iv, int enc) 347 { 348 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 349 if (!iv && !key) 350 return 1; 351 if (key) { 352 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 353 &gctx->ks.ks); 354 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt); 355 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks; 356 /* 357 * If we have an iv can set it directly, otherwise use saved IV. 358 */ 359 if (iv == NULL && gctx->iv_set) 360 iv = gctx->iv; 361 if (iv) { 362 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 363 gctx->iv_set = 1; 364 } 365 gctx->key_set = 1; 366 } else { 367 /* If key set use IV, otherwise copy */ 368 if (gctx->key_set) 369 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 370 else 371 memcpy(gctx->iv, iv, gctx->ivlen); 372 gctx->iv_set = 1; 373 gctx->iv_gen = 0; 374 } 375 return 1; 376 } 377 378 # define aesni_gcm_cipher aes_gcm_cipher 379 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 380 const unsigned char *in, size_t len); 381 382 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 383 const unsigned char *iv, int enc) 384 { 385 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 386 387 if (!iv && !key) 388 return 1; 389 390 if (key) { 391 /* The key is two half length keys in reality */ 392 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2; 393 394 /* 395 * Verify that the two keys are different. 396 * 397 * This addresses Rogaway's vulnerability. 398 * See comment in aes_xts_init_key() below. 399 */ 400 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) { 401 EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS); 402 return 0; 403 } 404 405 /* key_len is two AES keys */ 406 if (enc) { 407 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 408 &xctx->ks1.ks); 409 xctx->xts.block1 = (block128_f) aesni_encrypt; 410 xctx->stream = aesni_xts_encrypt; 411 } else { 412 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 413 &xctx->ks1.ks); 414 xctx->xts.block1 = (block128_f) aesni_decrypt; 415 xctx->stream = aesni_xts_decrypt; 416 } 417 418 aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 419 EVP_CIPHER_CTX_key_length(ctx) * 4, 420 &xctx->ks2.ks); 421 xctx->xts.block2 = (block128_f) aesni_encrypt; 422 423 xctx->xts.key1 = &xctx->ks1; 424 } 425 426 if (iv) { 427 xctx->xts.key2 = &xctx->ks2; 428 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 429 } 430 431 return 1; 432 } 433 434 # define aesni_xts_cipher aes_xts_cipher 435 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 436 const unsigned char *in, size_t len); 437 438 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 439 const unsigned char *iv, int enc) 440 { 441 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 442 if (!iv && !key) 443 return 1; 444 if (key) { 445 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 446 &cctx->ks.ks); 447 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 448 &cctx->ks, (block128_f) aesni_encrypt); 449 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks : 450 (ccm128_f) aesni_ccm64_decrypt_blocks; 451 cctx->key_set = 1; 452 } 453 if (iv) { 454 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 455 cctx->iv_set = 1; 456 } 457 return 1; 458 } 459 460 # define aesni_ccm_cipher aes_ccm_cipher 461 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 462 const unsigned char *in, size_t len); 463 464 # ifndef OPENSSL_NO_OCB 465 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out, 466 size_t blocks, const void *key, 467 size_t start_block_num, 468 unsigned char offset_i[16], 469 const unsigned char L_[][16], 470 unsigned char checksum[16]); 471 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out, 472 size_t blocks, const void *key, 473 size_t start_block_num, 474 unsigned char offset_i[16], 475 const unsigned char L_[][16], 476 unsigned char checksum[16]); 477 478 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 479 const unsigned char *iv, int enc) 480 { 481 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 482 if (!iv && !key) 483 return 1; 484 if (key) { 485 do { 486 /* 487 * We set both the encrypt and decrypt key here because decrypt 488 * needs both. We could possibly optimise to remove setting the 489 * decrypt for an encryption operation. 490 */ 491 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 492 &octx->ksenc.ks); 493 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 494 &octx->ksdec.ks); 495 if (!CRYPTO_ocb128_init(&octx->ocb, 496 &octx->ksenc.ks, &octx->ksdec.ks, 497 (block128_f) aesni_encrypt, 498 (block128_f) aesni_decrypt, 499 enc ? aesni_ocb_encrypt 500 : aesni_ocb_decrypt)) 501 return 0; 502 } 503 while (0); 504 505 /* 506 * If we have an iv we can set it directly, otherwise use saved IV. 507 */ 508 if (iv == NULL && octx->iv_set) 509 iv = octx->iv; 510 if (iv) { 511 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen) 512 != 1) 513 return 0; 514 octx->iv_set = 1; 515 } 516 octx->key_set = 1; 517 } else { 518 /* If key set use IV, otherwise copy */ 519 if (octx->key_set) 520 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen); 521 else 522 memcpy(octx->iv, iv, octx->ivlen); 523 octx->iv_set = 1; 524 } 525 return 1; 526 } 527 528 # define aesni_ocb_cipher aes_ocb_cipher 529 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 530 const unsigned char *in, size_t len); 531 # endif /* OPENSSL_NO_OCB */ 532 533 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 534 static const EVP_CIPHER aesni_##keylen##_##mode = { \ 535 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 536 flags|EVP_CIPH_##MODE##_MODE, \ 537 aesni_init_key, \ 538 aesni_##mode##_cipher, \ 539 NULL, \ 540 sizeof(EVP_AES_KEY), \ 541 NULL,NULL,NULL,NULL }; \ 542 static const EVP_CIPHER aes_##keylen##_##mode = { \ 543 nid##_##keylen##_##nmode,blocksize, \ 544 keylen/8,ivlen, \ 545 flags|EVP_CIPH_##MODE##_MODE, \ 546 aes_init_key, \ 547 aes_##mode##_cipher, \ 548 NULL, \ 549 sizeof(EVP_AES_KEY), \ 550 NULL,NULL,NULL,NULL }; \ 551 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 552 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; } 553 554 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 555 static const EVP_CIPHER aesni_##keylen##_##mode = { \ 556 nid##_##keylen##_##mode,blocksize, \ 557 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 558 flags|EVP_CIPH_##MODE##_MODE, \ 559 aesni_##mode##_init_key, \ 560 aesni_##mode##_cipher, \ 561 aes_##mode##_cleanup, \ 562 sizeof(EVP_AES_##MODE##_CTX), \ 563 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 564 static const EVP_CIPHER aes_##keylen##_##mode = { \ 565 nid##_##keylen##_##mode,blocksize, \ 566 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 567 flags|EVP_CIPH_##MODE##_MODE, \ 568 aes_##mode##_init_key, \ 569 aes_##mode##_cipher, \ 570 aes_##mode##_cleanup, \ 571 sizeof(EVP_AES_##MODE##_CTX), \ 572 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 573 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 574 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; } 575 576 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__)) 577 578 # include "sparc_arch.h" 579 580 extern unsigned int OPENSSL_sparcv9cap_P[]; 581 582 /* 583 * Initial Fujitsu SPARC64 X support 584 */ 585 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX) 586 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key 587 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key 588 # define HWAES_encrypt aes_fx_encrypt 589 # define HWAES_decrypt aes_fx_decrypt 590 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt 591 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks 592 593 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES) 594 595 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks); 596 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks); 597 void aes_t4_encrypt(const unsigned char *in, unsigned char *out, 598 const AES_KEY *key); 599 void aes_t4_decrypt(const unsigned char *in, unsigned char *out, 600 const AES_KEY *key); 601 /* 602 * Key-length specific subroutines were chosen for following reason. 603 * Each SPARC T4 core can execute up to 8 threads which share core's 604 * resources. Loading as much key material to registers allows to 605 * minimize references to shared memory interface, as well as amount 606 * of instructions in inner loops [much needed on T4]. But then having 607 * non-key-length specific routines would require conditional branches 608 * either in inner loops or on subroutines' entries. Former is hardly 609 * acceptable, while latter means code size increase to size occupied 610 * by multiple key-length specific subroutines, so why fight? 611 */ 612 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out, 613 size_t len, const AES_KEY *key, 614 unsigned char *ivec); 615 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out, 616 size_t len, const AES_KEY *key, 617 unsigned char *ivec); 618 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out, 619 size_t len, const AES_KEY *key, 620 unsigned char *ivec); 621 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out, 622 size_t len, const AES_KEY *key, 623 unsigned char *ivec); 624 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out, 625 size_t len, const AES_KEY *key, 626 unsigned char *ivec); 627 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out, 628 size_t len, const AES_KEY *key, 629 unsigned char *ivec); 630 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out, 631 size_t blocks, const AES_KEY *key, 632 unsigned char *ivec); 633 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out, 634 size_t blocks, const AES_KEY *key, 635 unsigned char *ivec); 636 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out, 637 size_t blocks, const AES_KEY *key, 638 unsigned char *ivec); 639 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out, 640 size_t blocks, const AES_KEY *key1, 641 const AES_KEY *key2, const unsigned char *ivec); 642 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out, 643 size_t blocks, const AES_KEY *key1, 644 const AES_KEY *key2, const unsigned char *ivec); 645 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out, 646 size_t blocks, const AES_KEY *key1, 647 const AES_KEY *key2, const unsigned char *ivec); 648 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out, 649 size_t blocks, const AES_KEY *key1, 650 const AES_KEY *key2, const unsigned char *ivec); 651 652 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 653 const unsigned char *iv, int enc) 654 { 655 int ret, mode, bits; 656 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 657 658 mode = EVP_CIPHER_CTX_mode(ctx); 659 bits = EVP_CIPHER_CTX_key_length(ctx) * 8; 660 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 661 && !enc) { 662 ret = 0; 663 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks); 664 dat->block = (block128_f) aes_t4_decrypt; 665 switch (bits) { 666 case 128: 667 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 668 (cbc128_f) aes128_t4_cbc_decrypt : NULL; 669 break; 670 case 192: 671 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 672 (cbc128_f) aes192_t4_cbc_decrypt : NULL; 673 break; 674 case 256: 675 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 676 (cbc128_f) aes256_t4_cbc_decrypt : NULL; 677 break; 678 default: 679 ret = -1; 680 } 681 } else { 682 ret = 0; 683 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks); 684 dat->block = (block128_f) aes_t4_encrypt; 685 switch (bits) { 686 case 128: 687 if (mode == EVP_CIPH_CBC_MODE) 688 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt; 689 else if (mode == EVP_CIPH_CTR_MODE) 690 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt; 691 else 692 dat->stream.cbc = NULL; 693 break; 694 case 192: 695 if (mode == EVP_CIPH_CBC_MODE) 696 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt; 697 else if (mode == EVP_CIPH_CTR_MODE) 698 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt; 699 else 700 dat->stream.cbc = NULL; 701 break; 702 case 256: 703 if (mode == EVP_CIPH_CBC_MODE) 704 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt; 705 else if (mode == EVP_CIPH_CTR_MODE) 706 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt; 707 else 708 dat->stream.cbc = NULL; 709 break; 710 default: 711 ret = -1; 712 } 713 } 714 715 if (ret < 0) { 716 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 717 return 0; 718 } 719 720 return 1; 721 } 722 723 # define aes_t4_cbc_cipher aes_cbc_cipher 724 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 725 const unsigned char *in, size_t len); 726 727 # define aes_t4_ecb_cipher aes_ecb_cipher 728 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 729 const unsigned char *in, size_t len); 730 731 # define aes_t4_ofb_cipher aes_ofb_cipher 732 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 733 const unsigned char *in, size_t len); 734 735 # define aes_t4_cfb_cipher aes_cfb_cipher 736 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 737 const unsigned char *in, size_t len); 738 739 # define aes_t4_cfb8_cipher aes_cfb8_cipher 740 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 741 const unsigned char *in, size_t len); 742 743 # define aes_t4_cfb1_cipher aes_cfb1_cipher 744 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 745 const unsigned char *in, size_t len); 746 747 # define aes_t4_ctr_cipher aes_ctr_cipher 748 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 749 const unsigned char *in, size_t len); 750 751 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 752 const unsigned char *iv, int enc) 753 { 754 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 755 if (!iv && !key) 756 return 1; 757 if (key) { 758 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8; 759 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks); 760 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 761 (block128_f) aes_t4_encrypt); 762 switch (bits) { 763 case 128: 764 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt; 765 break; 766 case 192: 767 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt; 768 break; 769 case 256: 770 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt; 771 break; 772 default: 773 return 0; 774 } 775 /* 776 * If we have an iv can set it directly, otherwise use saved IV. 777 */ 778 if (iv == NULL && gctx->iv_set) 779 iv = gctx->iv; 780 if (iv) { 781 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 782 gctx->iv_set = 1; 783 } 784 gctx->key_set = 1; 785 } else { 786 /* If key set use IV, otherwise copy */ 787 if (gctx->key_set) 788 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 789 else 790 memcpy(gctx->iv, iv, gctx->ivlen); 791 gctx->iv_set = 1; 792 gctx->iv_gen = 0; 793 } 794 return 1; 795 } 796 797 # define aes_t4_gcm_cipher aes_gcm_cipher 798 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 799 const unsigned char *in, size_t len); 800 801 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 802 const unsigned char *iv, int enc) 803 { 804 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 805 806 if (!iv && !key) 807 return 1; 808 809 if (key) { 810 /* The key is two half length keys in reality */ 811 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2; 812 const int bits = bytes * 8; 813 814 /* 815 * Verify that the two keys are different. 816 * 817 * This addresses Rogaway's vulnerability. 818 * See comment in aes_xts_init_key() below. 819 */ 820 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) { 821 EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS); 822 return 0; 823 } 824 825 xctx->stream = NULL; 826 /* key_len is two AES keys */ 827 if (enc) { 828 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks); 829 xctx->xts.block1 = (block128_f) aes_t4_encrypt; 830 switch (bits) { 831 case 128: 832 xctx->stream = aes128_t4_xts_encrypt; 833 break; 834 case 256: 835 xctx->stream = aes256_t4_xts_encrypt; 836 break; 837 default: 838 return 0; 839 } 840 } else { 841 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 842 &xctx->ks1.ks); 843 xctx->xts.block1 = (block128_f) aes_t4_decrypt; 844 switch (bits) { 845 case 128: 846 xctx->stream = aes128_t4_xts_decrypt; 847 break; 848 case 256: 849 xctx->stream = aes256_t4_xts_decrypt; 850 break; 851 default: 852 return 0; 853 } 854 } 855 856 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 857 EVP_CIPHER_CTX_key_length(ctx) * 4, 858 &xctx->ks2.ks); 859 xctx->xts.block2 = (block128_f) aes_t4_encrypt; 860 861 xctx->xts.key1 = &xctx->ks1; 862 } 863 864 if (iv) { 865 xctx->xts.key2 = &xctx->ks2; 866 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 867 } 868 869 return 1; 870 } 871 872 # define aes_t4_xts_cipher aes_xts_cipher 873 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 874 const unsigned char *in, size_t len); 875 876 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 877 const unsigned char *iv, int enc) 878 { 879 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 880 if (!iv && !key) 881 return 1; 882 if (key) { 883 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8; 884 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks); 885 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 886 &cctx->ks, (block128_f) aes_t4_encrypt); 887 cctx->str = NULL; 888 cctx->key_set = 1; 889 } 890 if (iv) { 891 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 892 cctx->iv_set = 1; 893 } 894 return 1; 895 } 896 897 # define aes_t4_ccm_cipher aes_ccm_cipher 898 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 899 const unsigned char *in, size_t len); 900 901 # ifndef OPENSSL_NO_OCB 902 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 903 const unsigned char *iv, int enc) 904 { 905 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 906 if (!iv && !key) 907 return 1; 908 if (key) { 909 do { 910 /* 911 * We set both the encrypt and decrypt key here because decrypt 912 * needs both. We could possibly optimise to remove setting the 913 * decrypt for an encryption operation. 914 */ 915 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 916 &octx->ksenc.ks); 917 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 918 &octx->ksdec.ks); 919 if (!CRYPTO_ocb128_init(&octx->ocb, 920 &octx->ksenc.ks, &octx->ksdec.ks, 921 (block128_f) aes_t4_encrypt, 922 (block128_f) aes_t4_decrypt, 923 NULL)) 924 return 0; 925 } 926 while (0); 927 928 /* 929 * If we have an iv we can set it directly, otherwise use saved IV. 930 */ 931 if (iv == NULL && octx->iv_set) 932 iv = octx->iv; 933 if (iv) { 934 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen) 935 != 1) 936 return 0; 937 octx->iv_set = 1; 938 } 939 octx->key_set = 1; 940 } else { 941 /* If key set use IV, otherwise copy */ 942 if (octx->key_set) 943 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen); 944 else 945 memcpy(octx->iv, iv, octx->ivlen); 946 octx->iv_set = 1; 947 } 948 return 1; 949 } 950 951 # define aes_t4_ocb_cipher aes_ocb_cipher 952 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 953 const unsigned char *in, size_t len); 954 # endif /* OPENSSL_NO_OCB */ 955 956 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 957 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \ 958 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 959 flags|EVP_CIPH_##MODE##_MODE, \ 960 aes_t4_init_key, \ 961 aes_t4_##mode##_cipher, \ 962 NULL, \ 963 sizeof(EVP_AES_KEY), \ 964 NULL,NULL,NULL,NULL }; \ 965 static const EVP_CIPHER aes_##keylen##_##mode = { \ 966 nid##_##keylen##_##nmode,blocksize, \ 967 keylen/8,ivlen, \ 968 flags|EVP_CIPH_##MODE##_MODE, \ 969 aes_init_key, \ 970 aes_##mode##_cipher, \ 971 NULL, \ 972 sizeof(EVP_AES_KEY), \ 973 NULL,NULL,NULL,NULL }; \ 974 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 975 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; } 976 977 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 978 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \ 979 nid##_##keylen##_##mode,blocksize, \ 980 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 981 flags|EVP_CIPH_##MODE##_MODE, \ 982 aes_t4_##mode##_init_key, \ 983 aes_t4_##mode##_cipher, \ 984 aes_##mode##_cleanup, \ 985 sizeof(EVP_AES_##MODE##_CTX), \ 986 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 987 static const EVP_CIPHER aes_##keylen##_##mode = { \ 988 nid##_##keylen##_##mode,blocksize, \ 989 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 990 flags|EVP_CIPH_##MODE##_MODE, \ 991 aes_##mode##_init_key, \ 992 aes_##mode##_cipher, \ 993 aes_##mode##_cleanup, \ 994 sizeof(EVP_AES_##MODE##_CTX), \ 995 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 996 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 997 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; } 998 999 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__) 1000 /* 1001 * IBM S390X support 1002 */ 1003 # include "s390x_arch.h" 1004 1005 typedef struct { 1006 union { 1007 double align; 1008 /*- 1009 * KM-AES parameter block - begin 1010 * (see z/Architecture Principles of Operation >= SA22-7832-06) 1011 */ 1012 struct { 1013 unsigned char k[32]; 1014 } param; 1015 /* KM-AES parameter block - end */ 1016 } km; 1017 unsigned int fc; 1018 } S390X_AES_ECB_CTX; 1019 1020 typedef struct { 1021 union { 1022 double align; 1023 /*- 1024 * KMO-AES parameter block - begin 1025 * (see z/Architecture Principles of Operation >= SA22-7832-08) 1026 */ 1027 struct { 1028 unsigned char cv[16]; 1029 unsigned char k[32]; 1030 } param; 1031 /* KMO-AES parameter block - end */ 1032 } kmo; 1033 unsigned int fc; 1034 1035 int res; 1036 } S390X_AES_OFB_CTX; 1037 1038 typedef struct { 1039 union { 1040 double align; 1041 /*- 1042 * KMF-AES parameter block - begin 1043 * (see z/Architecture Principles of Operation >= SA22-7832-08) 1044 */ 1045 struct { 1046 unsigned char cv[16]; 1047 unsigned char k[32]; 1048 } param; 1049 /* KMF-AES parameter block - end */ 1050 } kmf; 1051 unsigned int fc; 1052 1053 int res; 1054 } S390X_AES_CFB_CTX; 1055 1056 typedef struct { 1057 union { 1058 double align; 1059 /*- 1060 * KMA-GCM-AES parameter block - begin 1061 * (see z/Architecture Principles of Operation >= SA22-7832-11) 1062 */ 1063 struct { 1064 unsigned char reserved[12]; 1065 union { 1066 unsigned int w; 1067 unsigned char b[4]; 1068 } cv; 1069 union { 1070 unsigned long long g[2]; 1071 unsigned char b[16]; 1072 } t; 1073 unsigned char h[16]; 1074 unsigned long long taadl; 1075 unsigned long long tpcl; 1076 union { 1077 unsigned long long g[2]; 1078 unsigned int w[4]; 1079 } j0; 1080 unsigned char k[32]; 1081 } param; 1082 /* KMA-GCM-AES parameter block - end */ 1083 } kma; 1084 unsigned int fc; 1085 int key_set; 1086 1087 unsigned char *iv; 1088 int ivlen; 1089 int iv_set; 1090 int iv_gen; 1091 1092 int taglen; 1093 1094 unsigned char ares[16]; 1095 unsigned char mres[16]; 1096 unsigned char kres[16]; 1097 int areslen; 1098 int mreslen; 1099 int kreslen; 1100 1101 int tls_aad_len; 1102 } S390X_AES_GCM_CTX; 1103 1104 typedef struct { 1105 union { 1106 double align; 1107 /*- 1108 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and 1109 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's 1110 * rounds field is used to store the function code and that the key 1111 * schedule is not stored (if aes hardware support is detected). 1112 */ 1113 struct { 1114 unsigned char pad[16]; 1115 AES_KEY k; 1116 } key; 1117 1118 struct { 1119 /*- 1120 * KMAC-AES parameter block - begin 1121 * (see z/Architecture Principles of Operation >= SA22-7832-08) 1122 */ 1123 struct { 1124 union { 1125 unsigned long long g[2]; 1126 unsigned char b[16]; 1127 } icv; 1128 unsigned char k[32]; 1129 } kmac_param; 1130 /* KMAC-AES parameter block - end */ 1131 1132 union { 1133 unsigned long long g[2]; 1134 unsigned char b[16]; 1135 } nonce; 1136 union { 1137 unsigned long long g[2]; 1138 unsigned char b[16]; 1139 } buf; 1140 1141 unsigned long long blocks; 1142 int l; 1143 int m; 1144 int tls_aad_len; 1145 int iv_set; 1146 int tag_set; 1147 int len_set; 1148 int key_set; 1149 1150 unsigned char pad[140]; 1151 unsigned int fc; 1152 } ccm; 1153 } aes; 1154 } S390X_AES_CCM_CTX; 1155 1156 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */ 1157 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6)) 1158 1159 /* Most modes of operation need km for partial block processing. */ 1160 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \ 1161 S390X_CAPBIT(S390X_AES_128)) 1162 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \ 1163 S390X_CAPBIT(S390X_AES_192)) 1164 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \ 1165 S390X_CAPBIT(S390X_AES_256)) 1166 1167 # define s390x_aes_init_key aes_init_key 1168 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 1169 const unsigned char *iv, int enc); 1170 1171 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */ 1172 # define S390X_aes_192_cbc_CAPABLE 1 1173 # define S390X_aes_256_cbc_CAPABLE 1 1174 # define S390X_AES_CBC_CTX EVP_AES_KEY 1175 1176 # define s390x_aes_cbc_init_key aes_init_key 1177 1178 # define s390x_aes_cbc_cipher aes_cbc_cipher 1179 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1180 const unsigned char *in, size_t len); 1181 1182 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE 1183 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE 1184 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE 1185 1186 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx, 1187 const unsigned char *key, 1188 const unsigned char *iv, int enc) 1189 { 1190 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx); 1191 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1192 1193 cctx->fc = S390X_AES_FC(keylen); 1194 if (!enc) 1195 cctx->fc |= S390X_DECRYPT; 1196 1197 memcpy(cctx->km.param.k, key, keylen); 1198 return 1; 1199 } 1200 1201 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1202 const unsigned char *in, size_t len) 1203 { 1204 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx); 1205 1206 s390x_km(in, len, out, cctx->fc, &cctx->km.param); 1207 return 1; 1208 } 1209 1210 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \ 1211 (OPENSSL_s390xcap_P.kmo[0] & \ 1212 S390X_CAPBIT(S390X_AES_128))) 1213 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \ 1214 (OPENSSL_s390xcap_P.kmo[0] & \ 1215 S390X_CAPBIT(S390X_AES_192))) 1216 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \ 1217 (OPENSSL_s390xcap_P.kmo[0] & \ 1218 S390X_CAPBIT(S390X_AES_256))) 1219 1220 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx, 1221 const unsigned char *key, 1222 const unsigned char *ivec, int enc) 1223 { 1224 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx); 1225 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx); 1226 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1227 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1228 1229 memcpy(cctx->kmo.param.cv, iv, ivlen); 1230 memcpy(cctx->kmo.param.k, key, keylen); 1231 cctx->fc = S390X_AES_FC(keylen); 1232 cctx->res = 0; 1233 return 1; 1234 } 1235 1236 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1237 const unsigned char *in, size_t len) 1238 { 1239 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx); 1240 int n = cctx->res; 1241 int rem; 1242 1243 while (n && len) { 1244 *out = *in ^ cctx->kmo.param.cv[n]; 1245 n = (n + 1) & 0xf; 1246 --len; 1247 ++in; 1248 ++out; 1249 } 1250 1251 rem = len & 0xf; 1252 1253 len &= ~(size_t)0xf; 1254 if (len) { 1255 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param); 1256 1257 out += len; 1258 in += len; 1259 } 1260 1261 if (rem) { 1262 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc, 1263 cctx->kmo.param.k); 1264 1265 while (rem--) { 1266 out[n] = in[n] ^ cctx->kmo.param.cv[n]; 1267 ++n; 1268 } 1269 } 1270 1271 cctx->res = n; 1272 return 1; 1273 } 1274 1275 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \ 1276 (OPENSSL_s390xcap_P.kmf[0] & \ 1277 S390X_CAPBIT(S390X_AES_128))) 1278 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \ 1279 (OPENSSL_s390xcap_P.kmf[0] & \ 1280 S390X_CAPBIT(S390X_AES_192))) 1281 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \ 1282 (OPENSSL_s390xcap_P.kmf[0] & \ 1283 S390X_CAPBIT(S390X_AES_256))) 1284 1285 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx, 1286 const unsigned char *key, 1287 const unsigned char *ivec, int enc) 1288 { 1289 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1290 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx); 1291 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1292 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1293 1294 cctx->fc = S390X_AES_FC(keylen); 1295 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */ 1296 if (!enc) 1297 cctx->fc |= S390X_DECRYPT; 1298 1299 cctx->res = 0; 1300 memcpy(cctx->kmf.param.cv, iv, ivlen); 1301 memcpy(cctx->kmf.param.k, key, keylen); 1302 return 1; 1303 } 1304 1305 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1306 const unsigned char *in, size_t len) 1307 { 1308 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1309 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1310 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 1311 int n = cctx->res; 1312 int rem; 1313 unsigned char tmp; 1314 1315 while (n && len) { 1316 tmp = *in; 1317 *out = cctx->kmf.param.cv[n] ^ tmp; 1318 cctx->kmf.param.cv[n] = enc ? *out : tmp; 1319 n = (n + 1) & 0xf; 1320 --len; 1321 ++in; 1322 ++out; 1323 } 1324 1325 rem = len & 0xf; 1326 1327 len &= ~(size_t)0xf; 1328 if (len) { 1329 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param); 1330 1331 out += len; 1332 in += len; 1333 } 1334 1335 if (rem) { 1336 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv, 1337 S390X_AES_FC(keylen), cctx->kmf.param.k); 1338 1339 while (rem--) { 1340 tmp = in[n]; 1341 out[n] = cctx->kmf.param.cv[n] ^ tmp; 1342 cctx->kmf.param.cv[n] = enc ? out[n] : tmp; 1343 ++n; 1344 } 1345 } 1346 1347 cctx->res = n; 1348 return 1; 1349 } 1350 1351 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \ 1352 S390X_CAPBIT(S390X_AES_128)) 1353 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \ 1354 S390X_CAPBIT(S390X_AES_192)) 1355 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \ 1356 S390X_CAPBIT(S390X_AES_256)) 1357 1358 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx, 1359 const unsigned char *key, 1360 const unsigned char *ivec, int enc) 1361 { 1362 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1363 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx); 1364 const int keylen = EVP_CIPHER_CTX_key_length(ctx); 1365 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx); 1366 1367 cctx->fc = S390X_AES_FC(keylen); 1368 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */ 1369 if (!enc) 1370 cctx->fc |= S390X_DECRYPT; 1371 1372 memcpy(cctx->kmf.param.cv, iv, ivlen); 1373 memcpy(cctx->kmf.param.k, key, keylen); 1374 return 1; 1375 } 1376 1377 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1378 const unsigned char *in, size_t len) 1379 { 1380 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx); 1381 1382 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param); 1383 return 1; 1384 } 1385 1386 # define S390X_aes_128_cfb1_CAPABLE 0 1387 # define S390X_aes_192_cfb1_CAPABLE 0 1388 # define S390X_aes_256_cfb1_CAPABLE 0 1389 1390 # define s390x_aes_cfb1_init_key aes_init_key 1391 1392 # define s390x_aes_cfb1_cipher aes_cfb1_cipher 1393 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1394 const unsigned char *in, size_t len); 1395 1396 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */ 1397 # define S390X_aes_192_ctr_CAPABLE 1 1398 # define S390X_aes_256_ctr_CAPABLE 1 1399 # define S390X_AES_CTR_CTX EVP_AES_KEY 1400 1401 # define s390x_aes_ctr_init_key aes_init_key 1402 1403 # define s390x_aes_ctr_cipher aes_ctr_cipher 1404 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1405 const unsigned char *in, size_t len); 1406 1407 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \ 1408 (OPENSSL_s390xcap_P.kma[0] & \ 1409 S390X_CAPBIT(S390X_AES_128))) 1410 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \ 1411 (OPENSSL_s390xcap_P.kma[0] & \ 1412 S390X_CAPBIT(S390X_AES_192))) 1413 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \ 1414 (OPENSSL_s390xcap_P.kma[0] & \ 1415 S390X_CAPBIT(S390X_AES_256))) 1416 1417 /* iv + padding length for iv lengths != 12 */ 1418 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16) 1419 1420 /*- 1421 * Process additional authenticated data. Returns 0 on success. Code is 1422 * big-endian. 1423 */ 1424 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad, 1425 size_t len) 1426 { 1427 unsigned long long alen; 1428 int n, rem; 1429 1430 if (ctx->kma.param.tpcl) 1431 return -2; 1432 1433 alen = ctx->kma.param.taadl + len; 1434 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len)) 1435 return -1; 1436 ctx->kma.param.taadl = alen; 1437 1438 n = ctx->areslen; 1439 if (n) { 1440 while (n && len) { 1441 ctx->ares[n] = *aad; 1442 n = (n + 1) & 0xf; 1443 ++aad; 1444 --len; 1445 } 1446 /* ctx->ares contains a complete block if offset has wrapped around */ 1447 if (!n) { 1448 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param); 1449 ctx->fc |= S390X_KMA_HS; 1450 } 1451 ctx->areslen = n; 1452 } 1453 1454 rem = len & 0xf; 1455 1456 len &= ~(size_t)0xf; 1457 if (len) { 1458 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param); 1459 aad += len; 1460 ctx->fc |= S390X_KMA_HS; 1461 } 1462 1463 if (rem) { 1464 ctx->areslen = rem; 1465 1466 do { 1467 --rem; 1468 ctx->ares[rem] = aad[rem]; 1469 } while (rem); 1470 } 1471 return 0; 1472 } 1473 1474 /*- 1475 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for 1476 * success. Code is big-endian. 1477 */ 1478 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in, 1479 unsigned char *out, size_t len) 1480 { 1481 const unsigned char *inptr; 1482 unsigned long long mlen; 1483 union { 1484 unsigned int w[4]; 1485 unsigned char b[16]; 1486 } buf; 1487 size_t inlen; 1488 int n, rem, i; 1489 1490 mlen = ctx->kma.param.tpcl + len; 1491 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len)) 1492 return -1; 1493 ctx->kma.param.tpcl = mlen; 1494 1495 n = ctx->mreslen; 1496 if (n) { 1497 inptr = in; 1498 inlen = len; 1499 while (n && inlen) { 1500 ctx->mres[n] = *inptr; 1501 n = (n + 1) & 0xf; 1502 ++inptr; 1503 --inlen; 1504 } 1505 /* ctx->mres contains a complete block if offset has wrapped around */ 1506 if (!n) { 1507 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b, 1508 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param); 1509 ctx->fc |= S390X_KMA_HS; 1510 ctx->areslen = 0; 1511 1512 /* previous call already encrypted/decrypted its remainder, 1513 * see comment below */ 1514 n = ctx->mreslen; 1515 while (n) { 1516 *out = buf.b[n]; 1517 n = (n + 1) & 0xf; 1518 ++out; 1519 ++in; 1520 --len; 1521 } 1522 ctx->mreslen = 0; 1523 } 1524 } 1525 1526 rem = len & 0xf; 1527 1528 len &= ~(size_t)0xf; 1529 if (len) { 1530 s390x_kma(ctx->ares, ctx->areslen, in, len, out, 1531 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param); 1532 in += len; 1533 out += len; 1534 ctx->fc |= S390X_KMA_HS; 1535 ctx->areslen = 0; 1536 } 1537 1538 /*- 1539 * If there is a remainder, it has to be saved such that it can be 1540 * processed by kma later. However, we also have to do the for-now 1541 * unauthenticated encryption/decryption part here and now... 1542 */ 1543 if (rem) { 1544 if (!ctx->mreslen) { 1545 buf.w[0] = ctx->kma.param.j0.w[0]; 1546 buf.w[1] = ctx->kma.param.j0.w[1]; 1547 buf.w[2] = ctx->kma.param.j0.w[2]; 1548 buf.w[3] = ctx->kma.param.cv.w + 1; 1549 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k); 1550 } 1551 1552 n = ctx->mreslen; 1553 for (i = 0; i < rem; i++) { 1554 ctx->mres[n + i] = in[i]; 1555 out[i] = in[i] ^ ctx->kres[n + i]; 1556 } 1557 1558 ctx->mreslen += rem; 1559 } 1560 return 0; 1561 } 1562 1563 /*- 1564 * Initialize context structure. Code is big-endian. 1565 */ 1566 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx, 1567 const unsigned char *iv) 1568 { 1569 ctx->kma.param.t.g[0] = 0; 1570 ctx->kma.param.t.g[1] = 0; 1571 ctx->kma.param.tpcl = 0; 1572 ctx->kma.param.taadl = 0; 1573 ctx->mreslen = 0; 1574 ctx->areslen = 0; 1575 ctx->kreslen = 0; 1576 1577 if (ctx->ivlen == 12) { 1578 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen); 1579 ctx->kma.param.j0.w[3] = 1; 1580 ctx->kma.param.cv.w = 1; 1581 } else { 1582 /* ctx->iv has the right size and is already padded. */ 1583 memcpy(ctx->iv, iv, ctx->ivlen); 1584 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL, 1585 ctx->fc, &ctx->kma.param); 1586 ctx->fc |= S390X_KMA_HS; 1587 1588 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0]; 1589 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1]; 1590 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3]; 1591 ctx->kma.param.t.g[0] = 0; 1592 ctx->kma.param.t.g[1] = 0; 1593 } 1594 } 1595 1596 /*- 1597 * Performs various operations on the context structure depending on control 1598 * type. Returns 1 for success, 0 for failure and -1 for unknown control type. 1599 * Code is big-endian. 1600 */ 1601 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 1602 { 1603 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c); 1604 S390X_AES_GCM_CTX *gctx_out; 1605 EVP_CIPHER_CTX *out; 1606 unsigned char *buf, *iv; 1607 int ivlen, enc, len; 1608 1609 switch (type) { 1610 case EVP_CTRL_INIT: 1611 ivlen = EVP_CIPHER_iv_length(c->cipher); 1612 iv = EVP_CIPHER_CTX_iv_noconst(c); 1613 gctx->key_set = 0; 1614 gctx->iv_set = 0; 1615 gctx->ivlen = ivlen; 1616 gctx->iv = iv; 1617 gctx->taglen = -1; 1618 gctx->iv_gen = 0; 1619 gctx->tls_aad_len = -1; 1620 return 1; 1621 1622 case EVP_CTRL_GET_IVLEN: 1623 *(int *)ptr = gctx->ivlen; 1624 return 1; 1625 1626 case EVP_CTRL_AEAD_SET_IVLEN: 1627 if (arg <= 0) 1628 return 0; 1629 1630 if (arg != 12) { 1631 iv = EVP_CIPHER_CTX_iv_noconst(c); 1632 len = S390X_gcm_ivpadlen(arg); 1633 1634 /* Allocate memory for iv if needed. */ 1635 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) { 1636 if (gctx->iv != iv) 1637 OPENSSL_free(gctx->iv); 1638 1639 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) { 1640 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 1641 return 0; 1642 } 1643 } 1644 /* Add padding. */ 1645 memset(gctx->iv + arg, 0, len - arg - 8); 1646 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3; 1647 } 1648 gctx->ivlen = arg; 1649 return 1; 1650 1651 case EVP_CTRL_AEAD_SET_TAG: 1652 buf = EVP_CIPHER_CTX_buf_noconst(c); 1653 enc = EVP_CIPHER_CTX_encrypting(c); 1654 if (arg <= 0 || arg > 16 || enc) 1655 return 0; 1656 1657 memcpy(buf, ptr, arg); 1658 gctx->taglen = arg; 1659 return 1; 1660 1661 case EVP_CTRL_AEAD_GET_TAG: 1662 enc = EVP_CIPHER_CTX_encrypting(c); 1663 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0) 1664 return 0; 1665 1666 memcpy(ptr, gctx->kma.param.t.b, arg); 1667 return 1; 1668 1669 case EVP_CTRL_GCM_SET_IV_FIXED: 1670 /* Special case: -1 length restores whole iv */ 1671 if (arg == -1) { 1672 memcpy(gctx->iv, ptr, gctx->ivlen); 1673 gctx->iv_gen = 1; 1674 return 1; 1675 } 1676 /* 1677 * Fixed field must be at least 4 bytes and invocation field at least 1678 * 8. 1679 */ 1680 if ((arg < 4) || (gctx->ivlen - arg) < 8) 1681 return 0; 1682 1683 if (arg) 1684 memcpy(gctx->iv, ptr, arg); 1685 1686 enc = EVP_CIPHER_CTX_encrypting(c); 1687 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) 1688 return 0; 1689 1690 gctx->iv_gen = 1; 1691 return 1; 1692 1693 case EVP_CTRL_GCM_IV_GEN: 1694 if (gctx->iv_gen == 0 || gctx->key_set == 0) 1695 return 0; 1696 1697 s390x_aes_gcm_setiv(gctx, gctx->iv); 1698 1699 if (arg <= 0 || arg > gctx->ivlen) 1700 arg = gctx->ivlen; 1701 1702 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); 1703 /* 1704 * Invocation field will be at least 8 bytes in size and so no need 1705 * to check wrap around or increment more than last 8 bytes. 1706 */ 1707 ctr64_inc(gctx->iv + gctx->ivlen - 8); 1708 gctx->iv_set = 1; 1709 return 1; 1710 1711 case EVP_CTRL_GCM_SET_IV_INV: 1712 enc = EVP_CIPHER_CTX_encrypting(c); 1713 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc) 1714 return 0; 1715 1716 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); 1717 s390x_aes_gcm_setiv(gctx, gctx->iv); 1718 gctx->iv_set = 1; 1719 return 1; 1720 1721 case EVP_CTRL_AEAD_TLS1_AAD: 1722 /* Save the aad for later use. */ 1723 if (arg != EVP_AEAD_TLS1_AAD_LEN) 1724 return 0; 1725 1726 buf = EVP_CIPHER_CTX_buf_noconst(c); 1727 memcpy(buf, ptr, arg); 1728 gctx->tls_aad_len = arg; 1729 1730 len = buf[arg - 2] << 8 | buf[arg - 1]; 1731 /* Correct length for explicit iv. */ 1732 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN) 1733 return 0; 1734 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN; 1735 1736 /* If decrypting correct for tag too. */ 1737 enc = EVP_CIPHER_CTX_encrypting(c); 1738 if (!enc) { 1739 if (len < EVP_GCM_TLS_TAG_LEN) 1740 return 0; 1741 len -= EVP_GCM_TLS_TAG_LEN; 1742 } 1743 buf[arg - 2] = len >> 8; 1744 buf[arg - 1] = len & 0xff; 1745 /* Extra padding: tag appended to record. */ 1746 return EVP_GCM_TLS_TAG_LEN; 1747 1748 case EVP_CTRL_COPY: 1749 out = ptr; 1750 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out); 1751 iv = EVP_CIPHER_CTX_iv_noconst(c); 1752 1753 if (gctx->iv == iv) { 1754 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out); 1755 } else { 1756 len = S390X_gcm_ivpadlen(gctx->ivlen); 1757 1758 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) { 1759 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 1760 return 0; 1761 } 1762 1763 memcpy(gctx_out->iv, gctx->iv, len); 1764 } 1765 return 1; 1766 1767 default: 1768 return -1; 1769 } 1770 } 1771 1772 /*- 1773 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned. 1774 */ 1775 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx, 1776 const unsigned char *key, 1777 const unsigned char *iv, int enc) 1778 { 1779 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); 1780 int keylen; 1781 1782 if (iv == NULL && key == NULL) 1783 return 1; 1784 1785 if (key != NULL) { 1786 keylen = EVP_CIPHER_CTX_key_length(ctx); 1787 memcpy(&gctx->kma.param.k, key, keylen); 1788 1789 gctx->fc = S390X_AES_FC(keylen); 1790 if (!enc) 1791 gctx->fc |= S390X_DECRYPT; 1792 1793 if (iv == NULL && gctx->iv_set) 1794 iv = gctx->iv; 1795 1796 if (iv != NULL) { 1797 s390x_aes_gcm_setiv(gctx, iv); 1798 gctx->iv_set = 1; 1799 } 1800 gctx->key_set = 1; 1801 } else { 1802 if (gctx->key_set) 1803 s390x_aes_gcm_setiv(gctx, iv); 1804 else 1805 memcpy(gctx->iv, iv, gctx->ivlen); 1806 1807 gctx->iv_set = 1; 1808 gctx->iv_gen = 0; 1809 } 1810 return 1; 1811 } 1812 1813 /*- 1814 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written 1815 * if successful. Otherwise -1 is returned. Code is big-endian. 1816 */ 1817 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1818 const unsigned char *in, size_t len) 1819 { 1820 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); 1821 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx); 1822 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 1823 int rv = -1; 1824 1825 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN)) 1826 return -1; 1827 1828 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN 1829 : EVP_CTRL_GCM_SET_IV_INV, 1830 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0) 1831 goto err; 1832 1833 in += EVP_GCM_TLS_EXPLICIT_IV_LEN; 1834 out += EVP_GCM_TLS_EXPLICIT_IV_LEN; 1835 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 1836 1837 gctx->kma.param.taadl = gctx->tls_aad_len << 3; 1838 gctx->kma.param.tpcl = len << 3; 1839 s390x_kma(buf, gctx->tls_aad_len, in, len, out, 1840 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param); 1841 1842 if (enc) { 1843 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN); 1844 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 1845 } else { 1846 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len, 1847 EVP_GCM_TLS_TAG_LEN)) { 1848 OPENSSL_cleanse(out, len); 1849 goto err; 1850 } 1851 rv = len; 1852 } 1853 err: 1854 gctx->iv_set = 0; 1855 gctx->tls_aad_len = -1; 1856 return rv; 1857 } 1858 1859 /*- 1860 * Called from EVP layer to initialize context, process additional 1861 * authenticated data, en/de-crypt plain/cipher-text and authenticate 1862 * ciphertext or process a TLS packet, depending on context. Returns bytes 1863 * written on success. Otherwise -1 is returned. Code is big-endian. 1864 */ 1865 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1866 const unsigned char *in, size_t len) 1867 { 1868 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); 1869 unsigned char *buf, tmp[16]; 1870 int enc; 1871 1872 if (!gctx->key_set) 1873 return -1; 1874 1875 if (gctx->tls_aad_len >= 0) 1876 return s390x_aes_gcm_tls_cipher(ctx, out, in, len); 1877 1878 if (!gctx->iv_set) 1879 return -1; 1880 1881 if (in != NULL) { 1882 if (out == NULL) { 1883 if (s390x_aes_gcm_aad(gctx, in, len)) 1884 return -1; 1885 } else { 1886 if (s390x_aes_gcm(gctx, in, out, len)) 1887 return -1; 1888 } 1889 return len; 1890 } else { 1891 gctx->kma.param.taadl <<= 3; 1892 gctx->kma.param.tpcl <<= 3; 1893 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp, 1894 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param); 1895 /* recall that we already did en-/decrypt gctx->mres 1896 * and returned it to caller... */ 1897 OPENSSL_cleanse(tmp, gctx->mreslen); 1898 gctx->iv_set = 0; 1899 1900 enc = EVP_CIPHER_CTX_encrypting(ctx); 1901 if (enc) { 1902 gctx->taglen = 16; 1903 } else { 1904 if (gctx->taglen < 0) 1905 return -1; 1906 1907 buf = EVP_CIPHER_CTX_buf_noconst(ctx); 1908 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen)) 1909 return -1; 1910 } 1911 return 0; 1912 } 1913 } 1914 1915 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c) 1916 { 1917 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c); 1918 const unsigned char *iv; 1919 1920 if (gctx == NULL) 1921 return 0; 1922 1923 iv = EVP_CIPHER_CTX_iv(c); 1924 if (iv != gctx->iv) 1925 OPENSSL_free(gctx->iv); 1926 1927 OPENSSL_cleanse(gctx, sizeof(*gctx)); 1928 return 1; 1929 } 1930 1931 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX 1932 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */ 1933 # define S390X_aes_256_xts_CAPABLE 1 1934 1935 # define s390x_aes_xts_init_key aes_xts_init_key 1936 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx, 1937 const unsigned char *key, 1938 const unsigned char *iv, int enc); 1939 # define s390x_aes_xts_cipher aes_xts_cipher 1940 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 1941 const unsigned char *in, size_t len); 1942 # define s390x_aes_xts_ctrl aes_xts_ctrl 1943 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr); 1944 # define s390x_aes_xts_cleanup aes_xts_cleanup 1945 1946 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \ 1947 (OPENSSL_s390xcap_P.kmac[0] & \ 1948 S390X_CAPBIT(S390X_AES_128))) 1949 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \ 1950 (OPENSSL_s390xcap_P.kmac[0] & \ 1951 S390X_CAPBIT(S390X_AES_192))) 1952 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \ 1953 (OPENSSL_s390xcap_P.kmac[0] & \ 1954 S390X_CAPBIT(S390X_AES_256))) 1955 1956 # define S390X_CCM_AAD_FLAG 0x40 1957 1958 /*- 1959 * Set nonce and length fields. Code is big-endian. 1960 */ 1961 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx, 1962 const unsigned char *nonce, 1963 size_t mlen) 1964 { 1965 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG; 1966 ctx->aes.ccm.nonce.g[1] = mlen; 1967 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l); 1968 } 1969 1970 /*- 1971 * Process additional authenticated data. Code is big-endian. 1972 */ 1973 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad, 1974 size_t alen) 1975 { 1976 unsigned char *ptr; 1977 int i, rem; 1978 1979 if (!alen) 1980 return; 1981 1982 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG; 1983 1984 /* Suppress 'type-punned pointer dereference' warning. */ 1985 ptr = ctx->aes.ccm.buf.b; 1986 1987 if (alen < ((1 << 16) - (1 << 8))) { 1988 *(uint16_t *)ptr = alen; 1989 i = 2; 1990 } else if (sizeof(alen) == 8 1991 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) { 1992 *(uint16_t *)ptr = 0xffff; 1993 *(uint64_t *)(ptr + 2) = alen; 1994 i = 10; 1995 } else { 1996 *(uint16_t *)ptr = 0xfffe; 1997 *(uint32_t *)(ptr + 2) = alen; 1998 i = 6; 1999 } 2000 2001 while (i < 16 && alen) { 2002 ctx->aes.ccm.buf.b[i] = *aad; 2003 ++aad; 2004 --alen; 2005 ++i; 2006 } 2007 while (i < 16) { 2008 ctx->aes.ccm.buf.b[i] = 0; 2009 ++i; 2010 } 2011 2012 ctx->aes.ccm.kmac_param.icv.g[0] = 0; 2013 ctx->aes.ccm.kmac_param.icv.g[1] = 0; 2014 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc, 2015 &ctx->aes.ccm.kmac_param); 2016 ctx->aes.ccm.blocks += 2; 2017 2018 rem = alen & 0xf; 2019 alen &= ~(size_t)0xf; 2020 if (alen) { 2021 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2022 ctx->aes.ccm.blocks += alen >> 4; 2023 aad += alen; 2024 } 2025 if (rem) { 2026 for (i = 0; i < rem; i++) 2027 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i]; 2028 2029 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2030 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2031 ctx->aes.ccm.kmac_param.k); 2032 ctx->aes.ccm.blocks++; 2033 } 2034 } 2035 2036 /*- 2037 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for 2038 * success. 2039 */ 2040 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in, 2041 unsigned char *out, size_t len, int enc) 2042 { 2043 size_t n, rem; 2044 unsigned int i, l, num; 2045 unsigned char flags; 2046 2047 flags = ctx->aes.ccm.nonce.b[0]; 2048 if (!(flags & S390X_CCM_AAD_FLAG)) { 2049 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b, 2050 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k); 2051 ctx->aes.ccm.blocks++; 2052 } 2053 l = flags & 0x7; 2054 ctx->aes.ccm.nonce.b[0] = l; 2055 2056 /*- 2057 * Reconstruct length from encoded length field 2058 * and initialize it with counter value. 2059 */ 2060 n = 0; 2061 for (i = 15 - l; i < 15; i++) { 2062 n |= ctx->aes.ccm.nonce.b[i]; 2063 ctx->aes.ccm.nonce.b[i] = 0; 2064 n <<= 8; 2065 } 2066 n |= ctx->aes.ccm.nonce.b[15]; 2067 ctx->aes.ccm.nonce.b[15] = 1; 2068 2069 if (n != len) 2070 return -1; /* length mismatch */ 2071 2072 if (enc) { 2073 /* Two operations per block plus one for tag encryption */ 2074 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1; 2075 if (ctx->aes.ccm.blocks > (1ULL << 61)) 2076 return -2; /* too much data */ 2077 } 2078 2079 num = 0; 2080 rem = len & 0xf; 2081 len &= ~(size_t)0xf; 2082 2083 if (enc) { 2084 /* mac-then-encrypt */ 2085 if (len) 2086 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2087 if (rem) { 2088 for (i = 0; i < rem; i++) 2089 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i]; 2090 2091 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2092 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2093 ctx->aes.ccm.kmac_param.k); 2094 } 2095 2096 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k, 2097 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b, 2098 &num, (ctr128_f)AES_ctr32_encrypt); 2099 } else { 2100 /* decrypt-then-mac */ 2101 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k, 2102 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b, 2103 &num, (ctr128_f)AES_ctr32_encrypt); 2104 2105 if (len) 2106 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2107 if (rem) { 2108 for (i = 0; i < rem; i++) 2109 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i]; 2110 2111 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2112 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2113 ctx->aes.ccm.kmac_param.k); 2114 } 2115 } 2116 /* encrypt tag */ 2117 for (i = 15 - l; i < 16; i++) 2118 ctx->aes.ccm.nonce.b[i] = 0; 2119 2120 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc, 2121 ctx->aes.ccm.kmac_param.k); 2122 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0]; 2123 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1]; 2124 2125 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */ 2126 return 0; 2127 } 2128 2129 /*- 2130 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written 2131 * if successful. Otherwise -1 is returned. 2132 */ 2133 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2134 const unsigned char *in, size_t len) 2135 { 2136 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2137 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2138 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx); 2139 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 2140 2141 if (out != in 2142 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m)) 2143 return -1; 2144 2145 if (enc) { 2146 /* Set explicit iv (sequence number). */ 2147 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN); 2148 } 2149 2150 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m; 2151 /*- 2152 * Get explicit iv (sequence number). We already have fixed iv 2153 * (server/client_write_iv) here. 2154 */ 2155 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN); 2156 s390x_aes_ccm_setiv(cctx, ivec, len); 2157 2158 /* Process aad (sequence number|type|version|length) */ 2159 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len); 2160 2161 in += EVP_CCM_TLS_EXPLICIT_IV_LEN; 2162 out += EVP_CCM_TLS_EXPLICIT_IV_LEN; 2163 2164 if (enc) { 2165 if (s390x_aes_ccm(cctx, in, out, len, enc)) 2166 return -1; 2167 2168 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m); 2169 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m; 2170 } else { 2171 if (!s390x_aes_ccm(cctx, in, out, len, enc)) { 2172 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len, 2173 cctx->aes.ccm.m)) 2174 return len; 2175 } 2176 2177 OPENSSL_cleanse(out, len); 2178 return -1; 2179 } 2180 } 2181 2182 /*- 2183 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is 2184 * returned. 2185 */ 2186 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx, 2187 const unsigned char *key, 2188 const unsigned char *iv, int enc) 2189 { 2190 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2191 unsigned char *ivec; 2192 int keylen; 2193 2194 if (iv == NULL && key == NULL) 2195 return 1; 2196 2197 if (key != NULL) { 2198 keylen = EVP_CIPHER_CTX_key_length(ctx); 2199 cctx->aes.ccm.fc = S390X_AES_FC(keylen); 2200 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen); 2201 2202 /* Store encoded m and l. */ 2203 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7) 2204 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3; 2205 memset(cctx->aes.ccm.nonce.b + 1, 0, 2206 sizeof(cctx->aes.ccm.nonce.b)); 2207 cctx->aes.ccm.blocks = 0; 2208 2209 cctx->aes.ccm.key_set = 1; 2210 } 2211 2212 if (iv != NULL) { 2213 ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2214 memcpy(ivec, iv, 15 - cctx->aes.ccm.l); 2215 2216 cctx->aes.ccm.iv_set = 1; 2217 } 2218 2219 return 1; 2220 } 2221 2222 /*- 2223 * Called from EVP layer to initialize context, process additional 2224 * authenticated data, en/de-crypt plain/cipher-text and authenticate 2225 * plaintext or process a TLS packet, depending on context. Returns bytes 2226 * written on success. Otherwise -1 is returned. 2227 */ 2228 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2229 const unsigned char *in, size_t len) 2230 { 2231 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2232 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 2233 int rv; 2234 unsigned char *buf, *ivec; 2235 2236 if (!cctx->aes.ccm.key_set) 2237 return -1; 2238 2239 if (cctx->aes.ccm.tls_aad_len >= 0) 2240 return s390x_aes_ccm_tls_cipher(ctx, out, in, len); 2241 2242 /*- 2243 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt 2244 * so integrity must be checked already at Update() i.e., before 2245 * potentially corrupted data is output. 2246 */ 2247 if (in == NULL && out != NULL) 2248 return 0; 2249 2250 if (!cctx->aes.ccm.iv_set) 2251 return -1; 2252 2253 if (out == NULL) { 2254 /* Update(): Pass message length. */ 2255 if (in == NULL) { 2256 ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2257 s390x_aes_ccm_setiv(cctx, ivec, len); 2258 2259 cctx->aes.ccm.len_set = 1; 2260 return len; 2261 } 2262 2263 /* Update(): Process aad. */ 2264 if (!cctx->aes.ccm.len_set && len) 2265 return -1; 2266 2267 s390x_aes_ccm_aad(cctx, in, len); 2268 return len; 2269 } 2270 2271 /* The tag must be set before actually decrypting data */ 2272 if (!enc && !cctx->aes.ccm.tag_set) 2273 return -1; 2274 2275 /* Update(): Process message. */ 2276 2277 if (!cctx->aes.ccm.len_set) { 2278 /*- 2279 * In case message length was not previously set explicitly via 2280 * Update(), set it now. 2281 */ 2282 ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2283 s390x_aes_ccm_setiv(cctx, ivec, len); 2284 2285 cctx->aes.ccm.len_set = 1; 2286 } 2287 2288 if (enc) { 2289 if (s390x_aes_ccm(cctx, in, out, len, enc)) 2290 return -1; 2291 2292 cctx->aes.ccm.tag_set = 1; 2293 return len; 2294 } else { 2295 rv = -1; 2296 2297 if (!s390x_aes_ccm(cctx, in, out, len, enc)) { 2298 buf = EVP_CIPHER_CTX_buf_noconst(ctx); 2299 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf, 2300 cctx->aes.ccm.m)) 2301 rv = len; 2302 } 2303 2304 if (rv == -1) 2305 OPENSSL_cleanse(out, len); 2306 2307 cctx->aes.ccm.iv_set = 0; 2308 cctx->aes.ccm.tag_set = 0; 2309 cctx->aes.ccm.len_set = 0; 2310 return rv; 2311 } 2312 } 2313 2314 /*- 2315 * Performs various operations on the context structure depending on control 2316 * type. Returns 1 for success, 0 for failure and -1 for unknown control type. 2317 * Code is big-endian. 2318 */ 2319 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 2320 { 2321 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c); 2322 unsigned char *buf, *iv; 2323 int enc, len; 2324 2325 switch (type) { 2326 case EVP_CTRL_INIT: 2327 cctx->aes.ccm.key_set = 0; 2328 cctx->aes.ccm.iv_set = 0; 2329 cctx->aes.ccm.l = 8; 2330 cctx->aes.ccm.m = 12; 2331 cctx->aes.ccm.tag_set = 0; 2332 cctx->aes.ccm.len_set = 0; 2333 cctx->aes.ccm.tls_aad_len = -1; 2334 return 1; 2335 2336 case EVP_CTRL_GET_IVLEN: 2337 *(int *)ptr = 15 - cctx->aes.ccm.l; 2338 return 1; 2339 2340 case EVP_CTRL_AEAD_TLS1_AAD: 2341 if (arg != EVP_AEAD_TLS1_AAD_LEN) 2342 return 0; 2343 2344 /* Save the aad for later use. */ 2345 buf = EVP_CIPHER_CTX_buf_noconst(c); 2346 memcpy(buf, ptr, arg); 2347 cctx->aes.ccm.tls_aad_len = arg; 2348 2349 len = buf[arg - 2] << 8 | buf[arg - 1]; 2350 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN) 2351 return 0; 2352 2353 /* Correct length for explicit iv. */ 2354 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN; 2355 2356 enc = EVP_CIPHER_CTX_encrypting(c); 2357 if (!enc) { 2358 if (len < cctx->aes.ccm.m) 2359 return 0; 2360 2361 /* Correct length for tag. */ 2362 len -= cctx->aes.ccm.m; 2363 } 2364 2365 buf[arg - 2] = len >> 8; 2366 buf[arg - 1] = len & 0xff; 2367 2368 /* Extra padding: tag appended to record. */ 2369 return cctx->aes.ccm.m; 2370 2371 case EVP_CTRL_CCM_SET_IV_FIXED: 2372 if (arg != EVP_CCM_TLS_FIXED_IV_LEN) 2373 return 0; 2374 2375 /* Copy to first part of the iv. */ 2376 iv = EVP_CIPHER_CTX_iv_noconst(c); 2377 memcpy(iv, ptr, arg); 2378 return 1; 2379 2380 case EVP_CTRL_AEAD_SET_IVLEN: 2381 arg = 15 - arg; 2382 /* fall-through */ 2383 2384 case EVP_CTRL_CCM_SET_L: 2385 if (arg < 2 || arg > 8) 2386 return 0; 2387 2388 cctx->aes.ccm.l = arg; 2389 return 1; 2390 2391 case EVP_CTRL_AEAD_SET_TAG: 2392 if ((arg & 1) || arg < 4 || arg > 16) 2393 return 0; 2394 2395 enc = EVP_CIPHER_CTX_encrypting(c); 2396 if (enc && ptr) 2397 return 0; 2398 2399 if (ptr) { 2400 cctx->aes.ccm.tag_set = 1; 2401 buf = EVP_CIPHER_CTX_buf_noconst(c); 2402 memcpy(buf, ptr, arg); 2403 } 2404 2405 cctx->aes.ccm.m = arg; 2406 return 1; 2407 2408 case EVP_CTRL_AEAD_GET_TAG: 2409 enc = EVP_CIPHER_CTX_encrypting(c); 2410 if (!enc || !cctx->aes.ccm.tag_set) 2411 return 0; 2412 2413 if(arg < cctx->aes.ccm.m) 2414 return 0; 2415 2416 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m); 2417 cctx->aes.ccm.tag_set = 0; 2418 cctx->aes.ccm.iv_set = 0; 2419 cctx->aes.ccm.len_set = 0; 2420 return 1; 2421 2422 case EVP_CTRL_COPY: 2423 return 1; 2424 2425 default: 2426 return -1; 2427 } 2428 } 2429 2430 # define s390x_aes_ccm_cleanup aes_ccm_cleanup 2431 2432 # ifndef OPENSSL_NO_OCB 2433 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX 2434 # define S390X_aes_128_ocb_CAPABLE 0 2435 # define S390X_aes_192_ocb_CAPABLE 0 2436 # define S390X_aes_256_ocb_CAPABLE 0 2437 2438 # define s390x_aes_ocb_init_key aes_ocb_init_key 2439 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2440 const unsigned char *iv, int enc); 2441 # define s390x_aes_ocb_cipher aes_ocb_cipher 2442 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2443 const unsigned char *in, size_t len); 2444 # define s390x_aes_ocb_cleanup aes_ocb_cleanup 2445 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *); 2446 # define s390x_aes_ocb_ctrl aes_ocb_ctrl 2447 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr); 2448 # endif 2449 2450 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \ 2451 MODE,flags) \ 2452 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \ 2453 nid##_##keylen##_##nmode,blocksize, \ 2454 keylen / 8, \ 2455 ivlen, \ 2456 flags | EVP_CIPH_##MODE##_MODE, \ 2457 s390x_aes_##mode##_init_key, \ 2458 s390x_aes_##mode##_cipher, \ 2459 NULL, \ 2460 sizeof(S390X_AES_##MODE##_CTX), \ 2461 NULL, \ 2462 NULL, \ 2463 NULL, \ 2464 NULL \ 2465 }; \ 2466 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2467 nid##_##keylen##_##nmode, \ 2468 blocksize, \ 2469 keylen / 8, \ 2470 ivlen, \ 2471 flags | EVP_CIPH_##MODE##_MODE, \ 2472 aes_init_key, \ 2473 aes_##mode##_cipher, \ 2474 NULL, \ 2475 sizeof(EVP_AES_KEY), \ 2476 NULL, \ 2477 NULL, \ 2478 NULL, \ 2479 NULL \ 2480 }; \ 2481 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2482 { \ 2483 return S390X_aes_##keylen##_##mode##_CAPABLE ? \ 2484 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \ 2485 } 2486 2487 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\ 2488 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \ 2489 nid##_##keylen##_##mode, \ 2490 blocksize, \ 2491 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \ 2492 ivlen, \ 2493 flags | EVP_CIPH_##MODE##_MODE, \ 2494 s390x_aes_##mode##_init_key, \ 2495 s390x_aes_##mode##_cipher, \ 2496 s390x_aes_##mode##_cleanup, \ 2497 sizeof(S390X_AES_##MODE##_CTX), \ 2498 NULL, \ 2499 NULL, \ 2500 s390x_aes_##mode##_ctrl, \ 2501 NULL \ 2502 }; \ 2503 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2504 nid##_##keylen##_##mode,blocksize, \ 2505 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \ 2506 ivlen, \ 2507 flags | EVP_CIPH_##MODE##_MODE, \ 2508 aes_##mode##_init_key, \ 2509 aes_##mode##_cipher, \ 2510 aes_##mode##_cleanup, \ 2511 sizeof(EVP_AES_##MODE##_CTX), \ 2512 NULL, \ 2513 NULL, \ 2514 aes_##mode##_ctrl, \ 2515 NULL \ 2516 }; \ 2517 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2518 { \ 2519 return S390X_aes_##keylen##_##mode##_CAPABLE ? \ 2520 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \ 2521 } 2522 2523 #else 2524 2525 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 2526 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2527 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 2528 flags|EVP_CIPH_##MODE##_MODE, \ 2529 aes_init_key, \ 2530 aes_##mode##_cipher, \ 2531 NULL, \ 2532 sizeof(EVP_AES_KEY), \ 2533 NULL,NULL,NULL,NULL }; \ 2534 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2535 { return &aes_##keylen##_##mode; } 2536 2537 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 2538 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2539 nid##_##keylen##_##mode,blocksize, \ 2540 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 2541 flags|EVP_CIPH_##MODE##_MODE, \ 2542 aes_##mode##_init_key, \ 2543 aes_##mode##_cipher, \ 2544 aes_##mode##_cleanup, \ 2545 sizeof(EVP_AES_##MODE##_CTX), \ 2546 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 2547 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2548 { return &aes_##keylen##_##mode; } 2549 2550 #endif 2551 2552 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__)) 2553 # include "arm_arch.h" 2554 # if __ARM_MAX_ARCH__>=7 2555 # if defined(BSAES_ASM) 2556 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON) 2557 # endif 2558 # if defined(VPAES_ASM) 2559 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON) 2560 # endif 2561 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES) 2562 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key 2563 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key 2564 # define HWAES_encrypt aes_v8_encrypt 2565 # define HWAES_decrypt aes_v8_decrypt 2566 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt 2567 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks 2568 # endif 2569 #endif 2570 2571 #if defined(HWAES_CAPABLE) 2572 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits, 2573 AES_KEY *key); 2574 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits, 2575 AES_KEY *key); 2576 void HWAES_encrypt(const unsigned char *in, unsigned char *out, 2577 const AES_KEY *key); 2578 void HWAES_decrypt(const unsigned char *in, unsigned char *out, 2579 const AES_KEY *key); 2580 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out, 2581 size_t length, const AES_KEY *key, 2582 unsigned char *ivec, const int enc); 2583 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out, 2584 size_t len, const AES_KEY *key, 2585 const unsigned char ivec[16]); 2586 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out, 2587 size_t len, const AES_KEY *key1, 2588 const AES_KEY *key2, const unsigned char iv[16]); 2589 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out, 2590 size_t len, const AES_KEY *key1, 2591 const AES_KEY *key2, const unsigned char iv[16]); 2592 #endif 2593 2594 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \ 2595 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2596 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2597 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2598 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2599 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \ 2600 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \ 2601 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags) 2602 2603 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2604 const unsigned char *iv, int enc) 2605 { 2606 int ret, mode; 2607 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2608 2609 mode = EVP_CIPHER_CTX_mode(ctx); 2610 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 2611 && !enc) { 2612 #ifdef HWAES_CAPABLE 2613 if (HWAES_CAPABLE) { 2614 ret = HWAES_set_decrypt_key(key, 2615 EVP_CIPHER_CTX_key_length(ctx) * 8, 2616 &dat->ks.ks); 2617 dat->block = (block128_f) HWAES_decrypt; 2618 dat->stream.cbc = NULL; 2619 # ifdef HWAES_cbc_encrypt 2620 if (mode == EVP_CIPH_CBC_MODE) 2621 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt; 2622 # endif 2623 } else 2624 #endif 2625 #ifdef BSAES_CAPABLE 2626 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) { 2627 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2628 &dat->ks.ks); 2629 dat->block = (block128_f) AES_decrypt; 2630 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt; 2631 } else 2632 #endif 2633 #ifdef VPAES_CAPABLE 2634 if (VPAES_CAPABLE) { 2635 ret = vpaes_set_decrypt_key(key, 2636 EVP_CIPHER_CTX_key_length(ctx) * 8, 2637 &dat->ks.ks); 2638 dat->block = (block128_f) vpaes_decrypt; 2639 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2640 (cbc128_f) vpaes_cbc_encrypt : NULL; 2641 } else 2642 #endif 2643 { 2644 ret = AES_set_decrypt_key(key, 2645 EVP_CIPHER_CTX_key_length(ctx) * 8, 2646 &dat->ks.ks); 2647 dat->block = (block128_f) AES_decrypt; 2648 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2649 (cbc128_f) AES_cbc_encrypt : NULL; 2650 } 2651 } else 2652 #ifdef HWAES_CAPABLE 2653 if (HWAES_CAPABLE) { 2654 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2655 &dat->ks.ks); 2656 dat->block = (block128_f) HWAES_encrypt; 2657 dat->stream.cbc = NULL; 2658 # ifdef HWAES_cbc_encrypt 2659 if (mode == EVP_CIPH_CBC_MODE) 2660 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt; 2661 else 2662 # endif 2663 # ifdef HWAES_ctr32_encrypt_blocks 2664 if (mode == EVP_CIPH_CTR_MODE) 2665 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; 2666 else 2667 # endif 2668 (void)0; /* terminate potentially open 'else' */ 2669 } else 2670 #endif 2671 #ifdef BSAES_CAPABLE 2672 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) { 2673 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2674 &dat->ks.ks); 2675 dat->block = (block128_f) AES_encrypt; 2676 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks; 2677 } else 2678 #endif 2679 #ifdef VPAES_CAPABLE 2680 if (VPAES_CAPABLE) { 2681 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2682 &dat->ks.ks); 2683 dat->block = (block128_f) vpaes_encrypt; 2684 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2685 (cbc128_f) vpaes_cbc_encrypt : NULL; 2686 } else 2687 #endif 2688 { 2689 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2690 &dat->ks.ks); 2691 dat->block = (block128_f) AES_encrypt; 2692 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2693 (cbc128_f) AES_cbc_encrypt : NULL; 2694 #ifdef AES_CTR_ASM 2695 if (mode == EVP_CIPH_CTR_MODE) 2696 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt; 2697 #endif 2698 } 2699 2700 if (ret < 0) { 2701 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 2702 return 0; 2703 } 2704 2705 return 1; 2706 } 2707 2708 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2709 const unsigned char *in, size_t len) 2710 { 2711 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2712 2713 if (dat->stream.cbc) 2714 (*dat->stream.cbc) (in, out, len, &dat->ks, 2715 EVP_CIPHER_CTX_iv_noconst(ctx), 2716 EVP_CIPHER_CTX_encrypting(ctx)); 2717 else if (EVP_CIPHER_CTX_encrypting(ctx)) 2718 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, 2719 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block); 2720 else 2721 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, 2722 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block); 2723 2724 return 1; 2725 } 2726 2727 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2728 const unsigned char *in, size_t len) 2729 { 2730 size_t bl = EVP_CIPHER_CTX_block_size(ctx); 2731 size_t i; 2732 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2733 2734 if (len < bl) 2735 return 1; 2736 2737 for (i = 0, len -= bl; i <= len; i += bl) 2738 (*dat->block) (in + i, out + i, &dat->ks); 2739 2740 return 1; 2741 } 2742 2743 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2744 const unsigned char *in, size_t len) 2745 { 2746 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2747 2748 int num = EVP_CIPHER_CTX_num(ctx); 2749 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, 2750 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block); 2751 EVP_CIPHER_CTX_set_num(ctx, num); 2752 return 1; 2753 } 2754 2755 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2756 const unsigned char *in, size_t len) 2757 { 2758 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2759 2760 int num = EVP_CIPHER_CTX_num(ctx); 2761 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, 2762 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2763 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2764 EVP_CIPHER_CTX_set_num(ctx, num); 2765 return 1; 2766 } 2767 2768 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2769 const unsigned char *in, size_t len) 2770 { 2771 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2772 2773 int num = EVP_CIPHER_CTX_num(ctx); 2774 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, 2775 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2776 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2777 EVP_CIPHER_CTX_set_num(ctx, num); 2778 return 1; 2779 } 2780 2781 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2782 const unsigned char *in, size_t len) 2783 { 2784 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2785 2786 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) { 2787 int num = EVP_CIPHER_CTX_num(ctx); 2788 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, 2789 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2790 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2791 EVP_CIPHER_CTX_set_num(ctx, num); 2792 return 1; 2793 } 2794 2795 while (len >= MAXBITCHUNK) { 2796 int num = EVP_CIPHER_CTX_num(ctx); 2797 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks, 2798 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2799 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2800 EVP_CIPHER_CTX_set_num(ctx, num); 2801 len -= MAXBITCHUNK; 2802 out += MAXBITCHUNK; 2803 in += MAXBITCHUNK; 2804 } 2805 if (len) { 2806 int num = EVP_CIPHER_CTX_num(ctx); 2807 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks, 2808 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2809 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2810 EVP_CIPHER_CTX_set_num(ctx, num); 2811 } 2812 2813 return 1; 2814 } 2815 2816 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2817 const unsigned char *in, size_t len) 2818 { 2819 unsigned int num = EVP_CIPHER_CTX_num(ctx); 2820 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2821 2822 if (dat->stream.ctr) 2823 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, 2824 EVP_CIPHER_CTX_iv_noconst(ctx), 2825 EVP_CIPHER_CTX_buf_noconst(ctx), 2826 &num, dat->stream.ctr); 2827 else 2828 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, 2829 EVP_CIPHER_CTX_iv_noconst(ctx), 2830 EVP_CIPHER_CTX_buf_noconst(ctx), &num, 2831 dat->block); 2832 EVP_CIPHER_CTX_set_num(ctx, num); 2833 return 1; 2834 } 2835 2836 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0) 2837 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0) 2838 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0) 2839 2840 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c) 2841 { 2842 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); 2843 if (gctx == NULL) 2844 return 0; 2845 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm)); 2846 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c)) 2847 OPENSSL_free(gctx->iv); 2848 return 1; 2849 } 2850 2851 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 2852 { 2853 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); 2854 switch (type) { 2855 case EVP_CTRL_INIT: 2856 gctx->key_set = 0; 2857 gctx->iv_set = 0; 2858 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher); 2859 gctx->iv = c->iv; 2860 gctx->taglen = -1; 2861 gctx->iv_gen = 0; 2862 gctx->tls_aad_len = -1; 2863 return 1; 2864 2865 case EVP_CTRL_GET_IVLEN: 2866 *(int *)ptr = gctx->ivlen; 2867 return 1; 2868 2869 case EVP_CTRL_AEAD_SET_IVLEN: 2870 if (arg <= 0) 2871 return 0; 2872 /* Allocate memory for IV if needed */ 2873 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) { 2874 if (gctx->iv != c->iv) 2875 OPENSSL_free(gctx->iv); 2876 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) { 2877 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 2878 return 0; 2879 } 2880 } 2881 gctx->ivlen = arg; 2882 return 1; 2883 2884 case EVP_CTRL_AEAD_SET_TAG: 2885 if (arg <= 0 || arg > 16 || c->encrypt) 2886 return 0; 2887 memcpy(c->buf, ptr, arg); 2888 gctx->taglen = arg; 2889 return 1; 2890 2891 case EVP_CTRL_AEAD_GET_TAG: 2892 if (arg <= 0 || arg > 16 || !c->encrypt 2893 || gctx->taglen < 0) 2894 return 0; 2895 memcpy(ptr, c->buf, arg); 2896 return 1; 2897 2898 case EVP_CTRL_GET_IV: 2899 if (gctx->iv_gen != 1) 2900 return 0; 2901 if (gctx->ivlen != arg) 2902 return 0; 2903 memcpy(ptr, gctx->iv, arg); 2904 return 1; 2905 2906 case EVP_CTRL_GCM_SET_IV_FIXED: 2907 /* Special case: -1 length restores whole IV */ 2908 if (arg == -1) { 2909 memcpy(gctx->iv, ptr, gctx->ivlen); 2910 gctx->iv_gen = 1; 2911 return 1; 2912 } 2913 /* 2914 * Fixed field must be at least 4 bytes and invocation field at least 2915 * 8. 2916 */ 2917 if ((arg < 4) || (gctx->ivlen - arg) < 8) 2918 return 0; 2919 if (arg) 2920 memcpy(gctx->iv, ptr, arg); 2921 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) 2922 return 0; 2923 gctx->iv_gen = 1; 2924 return 1; 2925 2926 case EVP_CTRL_GCM_IV_GEN: 2927 if (gctx->iv_gen == 0 || gctx->key_set == 0) 2928 return 0; 2929 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); 2930 if (arg <= 0 || arg > gctx->ivlen) 2931 arg = gctx->ivlen; 2932 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); 2933 /* 2934 * Invocation field will be at least 8 bytes in size and so no need 2935 * to check wrap around or increment more than last 8 bytes. 2936 */ 2937 ctr64_inc(gctx->iv + gctx->ivlen - 8); 2938 gctx->iv_set = 1; 2939 return 1; 2940 2941 case EVP_CTRL_GCM_SET_IV_INV: 2942 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) 2943 return 0; 2944 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); 2945 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); 2946 gctx->iv_set = 1; 2947 return 1; 2948 2949 case EVP_CTRL_AEAD_TLS1_AAD: 2950 /* Save the AAD for later use */ 2951 if (arg != EVP_AEAD_TLS1_AAD_LEN) 2952 return 0; 2953 memcpy(c->buf, ptr, arg); 2954 gctx->tls_aad_len = arg; 2955 { 2956 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1]; 2957 /* Correct length for explicit IV */ 2958 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN) 2959 return 0; 2960 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN; 2961 /* If decrypting correct for tag too */ 2962 if (!c->encrypt) { 2963 if (len < EVP_GCM_TLS_TAG_LEN) 2964 return 0; 2965 len -= EVP_GCM_TLS_TAG_LEN; 2966 } 2967 c->buf[arg - 2] = len >> 8; 2968 c->buf[arg - 1] = len & 0xff; 2969 } 2970 /* Extra padding: tag appended to record */ 2971 return EVP_GCM_TLS_TAG_LEN; 2972 2973 case EVP_CTRL_COPY: 2974 { 2975 EVP_CIPHER_CTX *out = ptr; 2976 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out); 2977 if (gctx->gcm.key) { 2978 if (gctx->gcm.key != &gctx->ks) 2979 return 0; 2980 gctx_out->gcm.key = &gctx_out->ks; 2981 } 2982 if (gctx->iv == c->iv) 2983 gctx_out->iv = out->iv; 2984 else { 2985 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) { 2986 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 2987 return 0; 2988 } 2989 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen); 2990 } 2991 return 1; 2992 } 2993 2994 default: 2995 return -1; 2996 2997 } 2998 } 2999 3000 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3001 const unsigned char *iv, int enc) 3002 { 3003 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3004 if (!iv && !key) 3005 return 1; 3006 if (key) { 3007 do { 3008 #ifdef HWAES_CAPABLE 3009 if (HWAES_CAPABLE) { 3010 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3011 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3012 (block128_f) HWAES_encrypt); 3013 # ifdef HWAES_ctr32_encrypt_blocks 3014 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; 3015 # else 3016 gctx->ctr = NULL; 3017 # endif 3018 break; 3019 } else 3020 #endif 3021 #ifdef BSAES_CAPABLE 3022 if (BSAES_CAPABLE) { 3023 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3024 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3025 (block128_f) AES_encrypt); 3026 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks; 3027 break; 3028 } else 3029 #endif 3030 #ifdef VPAES_CAPABLE 3031 if (VPAES_CAPABLE) { 3032 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3033 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3034 (block128_f) vpaes_encrypt); 3035 gctx->ctr = NULL; 3036 break; 3037 } else 3038 #endif 3039 (void)0; /* terminate potentially open 'else' */ 3040 3041 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 3042 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 3043 (block128_f) AES_encrypt); 3044 #ifdef AES_CTR_ASM 3045 gctx->ctr = (ctr128_f) AES_ctr32_encrypt; 3046 #else 3047 gctx->ctr = NULL; 3048 #endif 3049 } while (0); 3050 3051 /* 3052 * If we have an iv can set it directly, otherwise use saved IV. 3053 */ 3054 if (iv == NULL && gctx->iv_set) 3055 iv = gctx->iv; 3056 if (iv) { 3057 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 3058 gctx->iv_set = 1; 3059 } 3060 gctx->key_set = 1; 3061 } else { 3062 /* If key set use IV, otherwise copy */ 3063 if (gctx->key_set) 3064 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 3065 else 3066 memcpy(gctx->iv, iv, gctx->ivlen); 3067 gctx->iv_set = 1; 3068 gctx->iv_gen = 0; 3069 } 3070 return 1; 3071 } 3072 3073 /* 3074 * Handle TLS GCM packet format. This consists of the last portion of the IV 3075 * followed by the payload and finally the tag. On encrypt generate IV, 3076 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload 3077 * and verify tag. 3078 */ 3079 3080 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3081 const unsigned char *in, size_t len) 3082 { 3083 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3084 int rv = -1; 3085 /* Encrypt/decrypt must be performed in place */ 3086 if (out != in 3087 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN)) 3088 return -1; 3089 /* 3090 * Set IV from start of buffer or generate IV and write to start of 3091 * buffer. 3092 */ 3093 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN 3094 : EVP_CTRL_GCM_SET_IV_INV, 3095 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0) 3096 goto err; 3097 /* Use saved AAD */ 3098 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len)) 3099 goto err; 3100 /* Fix buffer and length to point to payload */ 3101 in += EVP_GCM_TLS_EXPLICIT_IV_LEN; 3102 out += EVP_GCM_TLS_EXPLICIT_IV_LEN; 3103 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 3104 if (ctx->encrypt) { 3105 /* Encrypt payload */ 3106 if (gctx->ctr) { 3107 size_t bulk = 0; 3108 #if defined(AES_GCM_ASM) 3109 if (len >= 32 && AES_GCM_ASM(gctx)) { 3110 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) 3111 return -1; 3112 3113 bulk = AES_gcm_encrypt(in, out, len, 3114 gctx->gcm.key, 3115 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3116 gctx->gcm.len.u[1] += bulk; 3117 } 3118 #endif 3119 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, 3120 in + bulk, 3121 out + bulk, 3122 len - bulk, gctx->ctr)) 3123 goto err; 3124 } else { 3125 size_t bulk = 0; 3126 #if defined(AES_GCM_ASM2) 3127 if (len >= 32 && AES_GCM_ASM2(gctx)) { 3128 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) 3129 return -1; 3130 3131 bulk = AES_gcm_encrypt(in, out, len, 3132 gctx->gcm.key, 3133 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3134 gctx->gcm.len.u[1] += bulk; 3135 } 3136 #endif 3137 if (CRYPTO_gcm128_encrypt(&gctx->gcm, 3138 in + bulk, out + bulk, len - bulk)) 3139 goto err; 3140 } 3141 out += len; 3142 /* Finally write tag */ 3143 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN); 3144 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 3145 } else { 3146 /* Decrypt */ 3147 if (gctx->ctr) { 3148 size_t bulk = 0; 3149 #if defined(AES_GCM_ASM) 3150 if (len >= 16 && AES_GCM_ASM(gctx)) { 3151 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) 3152 return -1; 3153 3154 bulk = AES_gcm_decrypt(in, out, len, 3155 gctx->gcm.key, 3156 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3157 gctx->gcm.len.u[1] += bulk; 3158 } 3159 #endif 3160 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, 3161 in + bulk, 3162 out + bulk, 3163 len - bulk, gctx->ctr)) 3164 goto err; 3165 } else { 3166 size_t bulk = 0; 3167 #if defined(AES_GCM_ASM2) 3168 if (len >= 16 && AES_GCM_ASM2(gctx)) { 3169 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) 3170 return -1; 3171 3172 bulk = AES_gcm_decrypt(in, out, len, 3173 gctx->gcm.key, 3174 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3175 gctx->gcm.len.u[1] += bulk; 3176 } 3177 #endif 3178 if (CRYPTO_gcm128_decrypt(&gctx->gcm, 3179 in + bulk, out + bulk, len - bulk)) 3180 goto err; 3181 } 3182 /* Retrieve tag */ 3183 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN); 3184 /* If tag mismatch wipe buffer */ 3185 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) { 3186 OPENSSL_cleanse(out, len); 3187 goto err; 3188 } 3189 rv = len; 3190 } 3191 3192 err: 3193 gctx->iv_set = 0; 3194 gctx->tls_aad_len = -1; 3195 return rv; 3196 } 3197 3198 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3199 const unsigned char *in, size_t len) 3200 { 3201 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3202 /* If not set up, return error */ 3203 if (!gctx->key_set) 3204 return -1; 3205 3206 if (gctx->tls_aad_len >= 0) 3207 return aes_gcm_tls_cipher(ctx, out, in, len); 3208 3209 if (!gctx->iv_set) 3210 return -1; 3211 if (in) { 3212 if (out == NULL) { 3213 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len)) 3214 return -1; 3215 } else if (ctx->encrypt) { 3216 if (gctx->ctr) { 3217 size_t bulk = 0; 3218 #if defined(AES_GCM_ASM) 3219 if (len >= 32 && AES_GCM_ASM(gctx)) { 3220 size_t res = (16 - gctx->gcm.mres) % 16; 3221 3222 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) 3223 return -1; 3224 3225 bulk = AES_gcm_encrypt(in + res, 3226 out + res, len - res, 3227 gctx->gcm.key, gctx->gcm.Yi.c, 3228 gctx->gcm.Xi.u); 3229 gctx->gcm.len.u[1] += bulk; 3230 bulk += res; 3231 } 3232 #endif 3233 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, 3234 in + bulk, 3235 out + bulk, 3236 len - bulk, gctx->ctr)) 3237 return -1; 3238 } else { 3239 size_t bulk = 0; 3240 #if defined(AES_GCM_ASM2) 3241 if (len >= 32 && AES_GCM_ASM2(gctx)) { 3242 size_t res = (16 - gctx->gcm.mres) % 16; 3243 3244 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) 3245 return -1; 3246 3247 bulk = AES_gcm_encrypt(in + res, 3248 out + res, len - res, 3249 gctx->gcm.key, gctx->gcm.Yi.c, 3250 gctx->gcm.Xi.u); 3251 gctx->gcm.len.u[1] += bulk; 3252 bulk += res; 3253 } 3254 #endif 3255 if (CRYPTO_gcm128_encrypt(&gctx->gcm, 3256 in + bulk, out + bulk, len - bulk)) 3257 return -1; 3258 } 3259 } else { 3260 if (gctx->ctr) { 3261 size_t bulk = 0; 3262 #if defined(AES_GCM_ASM) 3263 if (len >= 16 && AES_GCM_ASM(gctx)) { 3264 size_t res = (16 - gctx->gcm.mres) % 16; 3265 3266 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) 3267 return -1; 3268 3269 bulk = AES_gcm_decrypt(in + res, 3270 out + res, len - res, 3271 gctx->gcm.key, 3272 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3273 gctx->gcm.len.u[1] += bulk; 3274 bulk += res; 3275 } 3276 #endif 3277 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, 3278 in + bulk, 3279 out + bulk, 3280 len - bulk, gctx->ctr)) 3281 return -1; 3282 } else { 3283 size_t bulk = 0; 3284 #if defined(AES_GCM_ASM2) 3285 if (len >= 16 && AES_GCM_ASM2(gctx)) { 3286 size_t res = (16 - gctx->gcm.mres) % 16; 3287 3288 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) 3289 return -1; 3290 3291 bulk = AES_gcm_decrypt(in + res, 3292 out + res, len - res, 3293 gctx->gcm.key, 3294 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3295 gctx->gcm.len.u[1] += bulk; 3296 bulk += res; 3297 } 3298 #endif 3299 if (CRYPTO_gcm128_decrypt(&gctx->gcm, 3300 in + bulk, out + bulk, len - bulk)) 3301 return -1; 3302 } 3303 } 3304 return len; 3305 } else { 3306 if (!ctx->encrypt) { 3307 if (gctx->taglen < 0) 3308 return -1; 3309 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0) 3310 return -1; 3311 gctx->iv_set = 0; 3312 return 0; 3313 } 3314 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16); 3315 gctx->taglen = 16; 3316 /* Don't reuse the IV */ 3317 gctx->iv_set = 0; 3318 return 0; 3319 } 3320 3321 } 3322 3323 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \ 3324 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \ 3325 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \ 3326 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH) 3327 3328 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, 3329 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3330 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM, 3331 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3332 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM, 3333 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3334 3335 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3336 { 3337 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c); 3338 3339 if (type == EVP_CTRL_COPY) { 3340 EVP_CIPHER_CTX *out = ptr; 3341 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out); 3342 3343 if (xctx->xts.key1) { 3344 if (xctx->xts.key1 != &xctx->ks1) 3345 return 0; 3346 xctx_out->xts.key1 = &xctx_out->ks1; 3347 } 3348 if (xctx->xts.key2) { 3349 if (xctx->xts.key2 != &xctx->ks2) 3350 return 0; 3351 xctx_out->xts.key2 = &xctx_out->ks2; 3352 } 3353 return 1; 3354 } else if (type != EVP_CTRL_INIT) 3355 return -1; 3356 /* key1 and key2 are used as an indicator both key and IV are set */ 3357 xctx->xts.key1 = NULL; 3358 xctx->xts.key2 = NULL; 3359 return 1; 3360 } 3361 3362 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3363 const unsigned char *iv, int enc) 3364 { 3365 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 3366 3367 if (!iv && !key) 3368 return 1; 3369 3370 if (key) 3371 do { 3372 /* The key is two half length keys in reality */ 3373 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2; 3374 3375 /* 3376 * Verify that the two keys are different. 3377 * 3378 * This addresses the vulnerability described in Rogaway's 3379 * September 2004 paper: 3380 * 3381 * "Efficient Instantiations of Tweakable Blockciphers and 3382 * Refinements to Modes OCB and PMAC". 3383 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf) 3384 * 3385 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states 3386 * that: 3387 * "The check for Key_1 != Key_2 shall be done at any place 3388 * BEFORE using the keys in the XTS-AES algorithm to process 3389 * data with them." 3390 */ 3391 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) { 3392 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS); 3393 return 0; 3394 } 3395 3396 #ifdef AES_XTS_ASM 3397 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt; 3398 #else 3399 xctx->stream = NULL; 3400 #endif 3401 /* key_len is two AES keys */ 3402 #ifdef HWAES_CAPABLE 3403 if (HWAES_CAPABLE) { 3404 if (enc) { 3405 HWAES_set_encrypt_key(key, 3406 EVP_CIPHER_CTX_key_length(ctx) * 4, 3407 &xctx->ks1.ks); 3408 xctx->xts.block1 = (block128_f) HWAES_encrypt; 3409 # ifdef HWAES_xts_encrypt 3410 xctx->stream = HWAES_xts_encrypt; 3411 # endif 3412 } else { 3413 HWAES_set_decrypt_key(key, 3414 EVP_CIPHER_CTX_key_length(ctx) * 4, 3415 &xctx->ks1.ks); 3416 xctx->xts.block1 = (block128_f) HWAES_decrypt; 3417 # ifdef HWAES_xts_decrypt 3418 xctx->stream = HWAES_xts_decrypt; 3419 #endif 3420 } 3421 3422 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3423 EVP_CIPHER_CTX_key_length(ctx) * 4, 3424 &xctx->ks2.ks); 3425 xctx->xts.block2 = (block128_f) HWAES_encrypt; 3426 3427 xctx->xts.key1 = &xctx->ks1; 3428 break; 3429 } else 3430 #endif 3431 #ifdef BSAES_CAPABLE 3432 if (BSAES_CAPABLE) 3433 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt; 3434 else 3435 #endif 3436 #ifdef VPAES_CAPABLE 3437 if (VPAES_CAPABLE) { 3438 if (enc) { 3439 vpaes_set_encrypt_key(key, 3440 EVP_CIPHER_CTX_key_length(ctx) * 4, 3441 &xctx->ks1.ks); 3442 xctx->xts.block1 = (block128_f) vpaes_encrypt; 3443 } else { 3444 vpaes_set_decrypt_key(key, 3445 EVP_CIPHER_CTX_key_length(ctx) * 4, 3446 &xctx->ks1.ks); 3447 xctx->xts.block1 = (block128_f) vpaes_decrypt; 3448 } 3449 3450 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3451 EVP_CIPHER_CTX_key_length(ctx) * 4, 3452 &xctx->ks2.ks); 3453 xctx->xts.block2 = (block128_f) vpaes_encrypt; 3454 3455 xctx->xts.key1 = &xctx->ks1; 3456 break; 3457 } else 3458 #endif 3459 (void)0; /* terminate potentially open 'else' */ 3460 3461 if (enc) { 3462 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 3463 &xctx->ks1.ks); 3464 xctx->xts.block1 = (block128_f) AES_encrypt; 3465 } else { 3466 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 3467 &xctx->ks1.ks); 3468 xctx->xts.block1 = (block128_f) AES_decrypt; 3469 } 3470 3471 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3472 EVP_CIPHER_CTX_key_length(ctx) * 4, 3473 &xctx->ks2.ks); 3474 xctx->xts.block2 = (block128_f) AES_encrypt; 3475 3476 xctx->xts.key1 = &xctx->ks1; 3477 } while (0); 3478 3479 if (iv) { 3480 xctx->xts.key2 = &xctx->ks2; 3481 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 3482 } 3483 3484 return 1; 3485 } 3486 3487 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3488 const unsigned char *in, size_t len) 3489 { 3490 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 3491 if (!xctx->xts.key1 || !xctx->xts.key2) 3492 return 0; 3493 if (!out || !in || len < AES_BLOCK_SIZE) 3494 return 0; 3495 if (xctx->stream) 3496 (*xctx->stream) (in, out, len, 3497 xctx->xts.key1, xctx->xts.key2, 3498 EVP_CIPHER_CTX_iv_noconst(ctx)); 3499 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx), 3500 in, out, len, 3501 EVP_CIPHER_CTX_encrypting(ctx))) 3502 return 0; 3503 return 1; 3504 } 3505 3506 #define aes_xts_cleanup NULL 3507 3508 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \ 3509 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \ 3510 | EVP_CIPH_CUSTOM_COPY) 3511 3512 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS) 3513 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS) 3514 3515 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3516 { 3517 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c); 3518 switch (type) { 3519 case EVP_CTRL_INIT: 3520 cctx->key_set = 0; 3521 cctx->iv_set = 0; 3522 cctx->L = 8; 3523 cctx->M = 12; 3524 cctx->tag_set = 0; 3525 cctx->len_set = 0; 3526 cctx->tls_aad_len = -1; 3527 return 1; 3528 case EVP_CTRL_GET_IVLEN: 3529 *(int *)ptr = 15 - cctx->L; 3530 return 1; 3531 case EVP_CTRL_AEAD_TLS1_AAD: 3532 /* Save the AAD for later use */ 3533 if (arg != EVP_AEAD_TLS1_AAD_LEN) 3534 return 0; 3535 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg); 3536 cctx->tls_aad_len = arg; 3537 { 3538 uint16_t len = 3539 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8 3540 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1]; 3541 /* Correct length for explicit IV */ 3542 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN) 3543 return 0; 3544 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN; 3545 /* If decrypting correct for tag too */ 3546 if (!EVP_CIPHER_CTX_encrypting(c)) { 3547 if (len < cctx->M) 3548 return 0; 3549 len -= cctx->M; 3550 } 3551 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8; 3552 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff; 3553 } 3554 /* Extra padding: tag appended to record */ 3555 return cctx->M; 3556 3557 case EVP_CTRL_CCM_SET_IV_FIXED: 3558 /* Sanity check length */ 3559 if (arg != EVP_CCM_TLS_FIXED_IV_LEN) 3560 return 0; 3561 /* Just copy to first part of IV */ 3562 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg); 3563 return 1; 3564 3565 case EVP_CTRL_AEAD_SET_IVLEN: 3566 arg = 15 - arg; 3567 /* fall thru */ 3568 case EVP_CTRL_CCM_SET_L: 3569 if (arg < 2 || arg > 8) 3570 return 0; 3571 cctx->L = arg; 3572 return 1; 3573 3574 case EVP_CTRL_AEAD_SET_TAG: 3575 if ((arg & 1) || arg < 4 || arg > 16) 3576 return 0; 3577 if (EVP_CIPHER_CTX_encrypting(c) && ptr) 3578 return 0; 3579 if (ptr) { 3580 cctx->tag_set = 1; 3581 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg); 3582 } 3583 cctx->M = arg; 3584 return 1; 3585 3586 case EVP_CTRL_AEAD_GET_TAG: 3587 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set) 3588 return 0; 3589 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg)) 3590 return 0; 3591 cctx->tag_set = 0; 3592 cctx->iv_set = 0; 3593 cctx->len_set = 0; 3594 return 1; 3595 3596 case EVP_CTRL_COPY: 3597 { 3598 EVP_CIPHER_CTX *out = ptr; 3599 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out); 3600 if (cctx->ccm.key) { 3601 if (cctx->ccm.key != &cctx->ks) 3602 return 0; 3603 cctx_out->ccm.key = &cctx_out->ks; 3604 } 3605 return 1; 3606 } 3607 3608 default: 3609 return -1; 3610 3611 } 3612 } 3613 3614 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3615 const unsigned char *iv, int enc) 3616 { 3617 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3618 if (!iv && !key) 3619 return 1; 3620 if (key) 3621 do { 3622 #ifdef HWAES_CAPABLE 3623 if (HWAES_CAPABLE) { 3624 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3625 &cctx->ks.ks); 3626 3627 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3628 &cctx->ks, (block128_f) HWAES_encrypt); 3629 cctx->str = NULL; 3630 cctx->key_set = 1; 3631 break; 3632 } else 3633 #endif 3634 #ifdef VPAES_CAPABLE 3635 if (VPAES_CAPABLE) { 3636 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3637 &cctx->ks.ks); 3638 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3639 &cctx->ks, (block128_f) vpaes_encrypt); 3640 cctx->str = NULL; 3641 cctx->key_set = 1; 3642 break; 3643 } 3644 #endif 3645 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3646 &cctx->ks.ks); 3647 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3648 &cctx->ks, (block128_f) AES_encrypt); 3649 cctx->str = NULL; 3650 cctx->key_set = 1; 3651 } while (0); 3652 if (iv) { 3653 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 3654 cctx->iv_set = 1; 3655 } 3656 return 1; 3657 } 3658 3659 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3660 const unsigned char *in, size_t len) 3661 { 3662 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3663 CCM128_CONTEXT *ccm = &cctx->ccm; 3664 /* Encrypt/decrypt must be performed in place */ 3665 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M)) 3666 return -1; 3667 /* If encrypting set explicit IV from sequence number (start of AAD) */ 3668 if (EVP_CIPHER_CTX_encrypting(ctx)) 3669 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx), 3670 EVP_CCM_TLS_EXPLICIT_IV_LEN); 3671 /* Get rest of IV from explicit IV */ 3672 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in, 3673 EVP_CCM_TLS_EXPLICIT_IV_LEN); 3674 /* Correct length value */ 3675 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M; 3676 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L, 3677 len)) 3678 return -1; 3679 /* Use saved AAD */ 3680 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len); 3681 /* Fix buffer to point to payload */ 3682 in += EVP_CCM_TLS_EXPLICIT_IV_LEN; 3683 out += EVP_CCM_TLS_EXPLICIT_IV_LEN; 3684 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3685 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, 3686 cctx->str) : 3687 CRYPTO_ccm128_encrypt(ccm, in, out, len)) 3688 return -1; 3689 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M)) 3690 return -1; 3691 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M; 3692 } else { 3693 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, 3694 cctx->str) : 3695 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) { 3696 unsigned char tag[16]; 3697 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) { 3698 if (!CRYPTO_memcmp(tag, in + len, cctx->M)) 3699 return len; 3700 } 3701 } 3702 OPENSSL_cleanse(out, len); 3703 return -1; 3704 } 3705 } 3706 3707 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3708 const unsigned char *in, size_t len) 3709 { 3710 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3711 CCM128_CONTEXT *ccm = &cctx->ccm; 3712 /* If not set up, return error */ 3713 if (!cctx->key_set) 3714 return -1; 3715 3716 if (cctx->tls_aad_len >= 0) 3717 return aes_ccm_tls_cipher(ctx, out, in, len); 3718 3719 /* EVP_*Final() doesn't return any data */ 3720 if (in == NULL && out != NULL) 3721 return 0; 3722 3723 if (!cctx->iv_set) 3724 return -1; 3725 3726 if (!out) { 3727 if (!in) { 3728 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 3729 15 - cctx->L, len)) 3730 return -1; 3731 cctx->len_set = 1; 3732 return len; 3733 } 3734 /* If have AAD need message length */ 3735 if (!cctx->len_set && len) 3736 return -1; 3737 CRYPTO_ccm128_aad(ccm, in, len); 3738 return len; 3739 } 3740 3741 /* The tag must be set before actually decrypting data */ 3742 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set) 3743 return -1; 3744 3745 /* If not set length yet do it */ 3746 if (!cctx->len_set) { 3747 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 3748 15 - cctx->L, len)) 3749 return -1; 3750 cctx->len_set = 1; 3751 } 3752 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3753 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, 3754 cctx->str) : 3755 CRYPTO_ccm128_encrypt(ccm, in, out, len)) 3756 return -1; 3757 cctx->tag_set = 1; 3758 return len; 3759 } else { 3760 int rv = -1; 3761 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, 3762 cctx->str) : 3763 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) { 3764 unsigned char tag[16]; 3765 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) { 3766 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx), 3767 cctx->M)) 3768 rv = len; 3769 } 3770 } 3771 if (rv == -1) 3772 OPENSSL_cleanse(out, len); 3773 cctx->iv_set = 0; 3774 cctx->tag_set = 0; 3775 cctx->len_set = 0; 3776 return rv; 3777 } 3778 } 3779 3780 #define aes_ccm_cleanup NULL 3781 3782 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM, 3783 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3784 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM, 3785 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3786 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM, 3787 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3788 3789 typedef struct { 3790 union { 3791 double align; 3792 AES_KEY ks; 3793 } ks; 3794 /* Indicates if IV has been set */ 3795 unsigned char *iv; 3796 } EVP_AES_WRAP_CTX; 3797 3798 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3799 const unsigned char *iv, int enc) 3800 { 3801 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx); 3802 if (!iv && !key) 3803 return 1; 3804 if (key) { 3805 if (EVP_CIPHER_CTX_encrypting(ctx)) 3806 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3807 &wctx->ks.ks); 3808 else 3809 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3810 &wctx->ks.ks); 3811 if (!iv) 3812 wctx->iv = NULL; 3813 } 3814 if (iv) { 3815 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx)); 3816 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx); 3817 } 3818 return 1; 3819 } 3820 3821 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3822 const unsigned char *in, size_t inlen) 3823 { 3824 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx); 3825 size_t rv; 3826 /* AES wrap with padding has IV length of 4, without padding 8 */ 3827 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4; 3828 /* No final operation so always return zero length */ 3829 if (!in) 3830 return 0; 3831 /* Input length must always be non-zero */ 3832 if (!inlen) 3833 return -1; 3834 /* If decrypting need at least 16 bytes and multiple of 8 */ 3835 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7)) 3836 return -1; 3837 /* If not padding input must be multiple of 8 */ 3838 if (!pad && inlen & 0x7) 3839 return -1; 3840 if (is_partially_overlapping(out, in, inlen)) { 3841 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING); 3842 return 0; 3843 } 3844 if (!out) { 3845 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3846 /* If padding round up to multiple of 8 */ 3847 if (pad) 3848 inlen = (inlen + 7) / 8 * 8; 3849 /* 8 byte prefix */ 3850 return inlen + 8; 3851 } else { 3852 /* 3853 * If not padding output will be exactly 8 bytes smaller than 3854 * input. If padding it will be at least 8 bytes smaller but we 3855 * don't know how much. 3856 */ 3857 return inlen - 8; 3858 } 3859 } 3860 if (pad) { 3861 if (EVP_CIPHER_CTX_encrypting(ctx)) 3862 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv, 3863 out, in, inlen, 3864 (block128_f) AES_encrypt); 3865 else 3866 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv, 3867 out, in, inlen, 3868 (block128_f) AES_decrypt); 3869 } else { 3870 if (EVP_CIPHER_CTX_encrypting(ctx)) 3871 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv, 3872 out, in, inlen, (block128_f) AES_encrypt); 3873 else 3874 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv, 3875 out, in, inlen, (block128_f) AES_decrypt); 3876 } 3877 return rv ? (int)rv : -1; 3878 } 3879 3880 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \ 3881 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \ 3882 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1) 3883 3884 static const EVP_CIPHER aes_128_wrap = { 3885 NID_id_aes128_wrap, 3886 8, 16, 8, WRAP_FLAGS, 3887 aes_wrap_init_key, aes_wrap_cipher, 3888 NULL, 3889 sizeof(EVP_AES_WRAP_CTX), 3890 NULL, NULL, NULL, NULL 3891 }; 3892 3893 const EVP_CIPHER *EVP_aes_128_wrap(void) 3894 { 3895 return &aes_128_wrap; 3896 } 3897 3898 static const EVP_CIPHER aes_192_wrap = { 3899 NID_id_aes192_wrap, 3900 8, 24, 8, WRAP_FLAGS, 3901 aes_wrap_init_key, aes_wrap_cipher, 3902 NULL, 3903 sizeof(EVP_AES_WRAP_CTX), 3904 NULL, NULL, NULL, NULL 3905 }; 3906 3907 const EVP_CIPHER *EVP_aes_192_wrap(void) 3908 { 3909 return &aes_192_wrap; 3910 } 3911 3912 static const EVP_CIPHER aes_256_wrap = { 3913 NID_id_aes256_wrap, 3914 8, 32, 8, WRAP_FLAGS, 3915 aes_wrap_init_key, aes_wrap_cipher, 3916 NULL, 3917 sizeof(EVP_AES_WRAP_CTX), 3918 NULL, NULL, NULL, NULL 3919 }; 3920 3921 const EVP_CIPHER *EVP_aes_256_wrap(void) 3922 { 3923 return &aes_256_wrap; 3924 } 3925 3926 static const EVP_CIPHER aes_128_wrap_pad = { 3927 NID_id_aes128_wrap_pad, 3928 8, 16, 4, WRAP_FLAGS, 3929 aes_wrap_init_key, aes_wrap_cipher, 3930 NULL, 3931 sizeof(EVP_AES_WRAP_CTX), 3932 NULL, NULL, NULL, NULL 3933 }; 3934 3935 const EVP_CIPHER *EVP_aes_128_wrap_pad(void) 3936 { 3937 return &aes_128_wrap_pad; 3938 } 3939 3940 static const EVP_CIPHER aes_192_wrap_pad = { 3941 NID_id_aes192_wrap_pad, 3942 8, 24, 4, WRAP_FLAGS, 3943 aes_wrap_init_key, aes_wrap_cipher, 3944 NULL, 3945 sizeof(EVP_AES_WRAP_CTX), 3946 NULL, NULL, NULL, NULL 3947 }; 3948 3949 const EVP_CIPHER *EVP_aes_192_wrap_pad(void) 3950 { 3951 return &aes_192_wrap_pad; 3952 } 3953 3954 static const EVP_CIPHER aes_256_wrap_pad = { 3955 NID_id_aes256_wrap_pad, 3956 8, 32, 4, WRAP_FLAGS, 3957 aes_wrap_init_key, aes_wrap_cipher, 3958 NULL, 3959 sizeof(EVP_AES_WRAP_CTX), 3960 NULL, NULL, NULL, NULL 3961 }; 3962 3963 const EVP_CIPHER *EVP_aes_256_wrap_pad(void) 3964 { 3965 return &aes_256_wrap_pad; 3966 } 3967 3968 #ifndef OPENSSL_NO_OCB 3969 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3970 { 3971 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c); 3972 EVP_CIPHER_CTX *newc; 3973 EVP_AES_OCB_CTX *new_octx; 3974 3975 switch (type) { 3976 case EVP_CTRL_INIT: 3977 octx->key_set = 0; 3978 octx->iv_set = 0; 3979 octx->ivlen = EVP_CIPHER_iv_length(c->cipher); 3980 octx->iv = EVP_CIPHER_CTX_iv_noconst(c); 3981 octx->taglen = 16; 3982 octx->data_buf_len = 0; 3983 octx->aad_buf_len = 0; 3984 return 1; 3985 3986 case EVP_CTRL_GET_IVLEN: 3987 *(int *)ptr = octx->ivlen; 3988 return 1; 3989 3990 case EVP_CTRL_AEAD_SET_IVLEN: 3991 /* IV len must be 1 to 15 */ 3992 if (arg <= 0 || arg > 15) 3993 return 0; 3994 3995 octx->ivlen = arg; 3996 return 1; 3997 3998 case EVP_CTRL_AEAD_SET_TAG: 3999 if (!ptr) { 4000 /* Tag len must be 0 to 16 */ 4001 if (arg < 0 || arg > 16) 4002 return 0; 4003 4004 octx->taglen = arg; 4005 return 1; 4006 } 4007 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c)) 4008 return 0; 4009 memcpy(octx->tag, ptr, arg); 4010 return 1; 4011 4012 case EVP_CTRL_AEAD_GET_TAG: 4013 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c)) 4014 return 0; 4015 4016 memcpy(ptr, octx->tag, arg); 4017 return 1; 4018 4019 case EVP_CTRL_COPY: 4020 newc = (EVP_CIPHER_CTX *)ptr; 4021 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc); 4022 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb, 4023 &new_octx->ksenc.ks, 4024 &new_octx->ksdec.ks); 4025 4026 default: 4027 return -1; 4028 4029 } 4030 } 4031 4032 # ifdef HWAES_CAPABLE 4033 # ifdef HWAES_ocb_encrypt 4034 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out, 4035 size_t blocks, const void *key, 4036 size_t start_block_num, 4037 unsigned char offset_i[16], 4038 const unsigned char L_[][16], 4039 unsigned char checksum[16]); 4040 # else 4041 # define HWAES_ocb_encrypt ((ocb128_f)NULL) 4042 # endif 4043 # ifdef HWAES_ocb_decrypt 4044 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out, 4045 size_t blocks, const void *key, 4046 size_t start_block_num, 4047 unsigned char offset_i[16], 4048 const unsigned char L_[][16], 4049 unsigned char checksum[16]); 4050 # else 4051 # define HWAES_ocb_decrypt ((ocb128_f)NULL) 4052 # endif 4053 # endif 4054 4055 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 4056 const unsigned char *iv, int enc) 4057 { 4058 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 4059 if (!iv && !key) 4060 return 1; 4061 if (key) { 4062 do { 4063 /* 4064 * We set both the encrypt and decrypt key here because decrypt 4065 * needs both. We could possibly optimise to remove setting the 4066 * decrypt for an encryption operation. 4067 */ 4068 # ifdef HWAES_CAPABLE 4069 if (HWAES_CAPABLE) { 4070 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4071 &octx->ksenc.ks); 4072 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4073 &octx->ksdec.ks); 4074 if (!CRYPTO_ocb128_init(&octx->ocb, 4075 &octx->ksenc.ks, &octx->ksdec.ks, 4076 (block128_f) HWAES_encrypt, 4077 (block128_f) HWAES_decrypt, 4078 enc ? HWAES_ocb_encrypt 4079 : HWAES_ocb_decrypt)) 4080 return 0; 4081 break; 4082 } 4083 # endif 4084 # ifdef VPAES_CAPABLE 4085 if (VPAES_CAPABLE) { 4086 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4087 &octx->ksenc.ks); 4088 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4089 &octx->ksdec.ks); 4090 if (!CRYPTO_ocb128_init(&octx->ocb, 4091 &octx->ksenc.ks, &octx->ksdec.ks, 4092 (block128_f) vpaes_encrypt, 4093 (block128_f) vpaes_decrypt, 4094 NULL)) 4095 return 0; 4096 break; 4097 } 4098 # endif 4099 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4100 &octx->ksenc.ks); 4101 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 4102 &octx->ksdec.ks); 4103 if (!CRYPTO_ocb128_init(&octx->ocb, 4104 &octx->ksenc.ks, &octx->ksdec.ks, 4105 (block128_f) AES_encrypt, 4106 (block128_f) AES_decrypt, 4107 NULL)) 4108 return 0; 4109 } 4110 while (0); 4111 4112 /* 4113 * If we have an iv we can set it directly, otherwise use saved IV. 4114 */ 4115 if (iv == NULL && octx->iv_set) 4116 iv = octx->iv; 4117 if (iv) { 4118 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen) 4119 != 1) 4120 return 0; 4121 octx->iv_set = 1; 4122 } 4123 octx->key_set = 1; 4124 } else { 4125 /* If key set use IV, otherwise copy */ 4126 if (octx->key_set) 4127 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen); 4128 else 4129 memcpy(octx->iv, iv, octx->ivlen); 4130 octx->iv_set = 1; 4131 } 4132 return 1; 4133 } 4134 4135 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 4136 const unsigned char *in, size_t len) 4137 { 4138 unsigned char *buf; 4139 int *buf_len; 4140 int written_len = 0; 4141 size_t trailing_len; 4142 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx); 4143 4144 /* If IV or Key not set then return error */ 4145 if (!octx->iv_set) 4146 return -1; 4147 4148 if (!octx->key_set) 4149 return -1; 4150 4151 if (in != NULL) { 4152 /* 4153 * Need to ensure we are only passing full blocks to low level OCB 4154 * routines. We do it here rather than in EVP_EncryptUpdate/ 4155 * EVP_DecryptUpdate because we need to pass full blocks of AAD too 4156 * and those routines don't support that 4157 */ 4158 4159 /* Are we dealing with AAD or normal data here? */ 4160 if (out == NULL) { 4161 buf = octx->aad_buf; 4162 buf_len = &(octx->aad_buf_len); 4163 } else { 4164 buf = octx->data_buf; 4165 buf_len = &(octx->data_buf_len); 4166 4167 if (is_partially_overlapping(out + *buf_len, in, len)) { 4168 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING); 4169 return 0; 4170 } 4171 } 4172 4173 /* 4174 * If we've got a partially filled buffer from a previous call then 4175 * use that data first 4176 */ 4177 if (*buf_len > 0) { 4178 unsigned int remaining; 4179 4180 remaining = AES_BLOCK_SIZE - (*buf_len); 4181 if (remaining > len) { 4182 memcpy(buf + (*buf_len), in, len); 4183 *(buf_len) += len; 4184 return 0; 4185 } 4186 memcpy(buf + (*buf_len), in, remaining); 4187 4188 /* 4189 * If we get here we've filled the buffer, so process it 4190 */ 4191 len -= remaining; 4192 in += remaining; 4193 if (out == NULL) { 4194 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE)) 4195 return -1; 4196 } else if (EVP_CIPHER_CTX_encrypting(ctx)) { 4197 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out, 4198 AES_BLOCK_SIZE)) 4199 return -1; 4200 } else { 4201 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out, 4202 AES_BLOCK_SIZE)) 4203 return -1; 4204 } 4205 written_len = AES_BLOCK_SIZE; 4206 *buf_len = 0; 4207 if (out != NULL) 4208 out += AES_BLOCK_SIZE; 4209 } 4210 4211 /* Do we have a partial block to handle at the end? */ 4212 trailing_len = len % AES_BLOCK_SIZE; 4213 4214 /* 4215 * If we've got some full blocks to handle, then process these first 4216 */ 4217 if (len != trailing_len) { 4218 if (out == NULL) { 4219 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len)) 4220 return -1; 4221 } else if (EVP_CIPHER_CTX_encrypting(ctx)) { 4222 if (!CRYPTO_ocb128_encrypt 4223 (&octx->ocb, in, out, len - trailing_len)) 4224 return -1; 4225 } else { 4226 if (!CRYPTO_ocb128_decrypt 4227 (&octx->ocb, in, out, len - trailing_len)) 4228 return -1; 4229 } 4230 written_len += len - trailing_len; 4231 in += len - trailing_len; 4232 } 4233 4234 /* Handle any trailing partial block */ 4235 if (trailing_len > 0) { 4236 memcpy(buf, in, trailing_len); 4237 *buf_len = trailing_len; 4238 } 4239 4240 return written_len; 4241 } else { 4242 /* 4243 * First of all empty the buffer of any partial block that we might 4244 * have been provided - both for data and AAD 4245 */ 4246 if (octx->data_buf_len > 0) { 4247 if (EVP_CIPHER_CTX_encrypting(ctx)) { 4248 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out, 4249 octx->data_buf_len)) 4250 return -1; 4251 } else { 4252 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out, 4253 octx->data_buf_len)) 4254 return -1; 4255 } 4256 written_len = octx->data_buf_len; 4257 octx->data_buf_len = 0; 4258 } 4259 if (octx->aad_buf_len > 0) { 4260 if (!CRYPTO_ocb128_aad 4261 (&octx->ocb, octx->aad_buf, octx->aad_buf_len)) 4262 return -1; 4263 octx->aad_buf_len = 0; 4264 } 4265 /* If decrypting then verify */ 4266 if (!EVP_CIPHER_CTX_encrypting(ctx)) { 4267 if (octx->taglen < 0) 4268 return -1; 4269 if (CRYPTO_ocb128_finish(&octx->ocb, 4270 octx->tag, octx->taglen) != 0) 4271 return -1; 4272 octx->iv_set = 0; 4273 return written_len; 4274 } 4275 /* If encrypting then just get the tag */ 4276 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1) 4277 return -1; 4278 /* Don't reuse the IV */ 4279 octx->iv_set = 0; 4280 return written_len; 4281 } 4282 } 4283 4284 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c) 4285 { 4286 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c); 4287 CRYPTO_ocb128_cleanup(&octx->ocb); 4288 return 1; 4289 } 4290 4291 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB, 4292 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 4293 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB, 4294 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 4295 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB, 4296 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 4297 #endif /* OPENSSL_NO_OCB */ 4298