/*
 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "internal/evp_int.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
#include "evp_locl.h"

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif

#define MAXBITCHUNK     ((size_t)1 << (sizeof(size_t) * 8 - 4))

#ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
#endif
#ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif
#ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
#endif
#ifdef AES_XTS_ASM
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
#endif

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
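/*-
 * Worked example for ctr64_inc (illustrative only): given the eight
 * counter bytes
 *     00 00 00 00 00 00 00 ff
 * a single call yields
 *     00 00 00 00 00 00 01 00
 * i.e. the carry propagates left only while a byte wraps to zero, and at
 * most eight bytes are ever touched; callers pass a pointer to the last
 * eight bytes of a longer IV when only the invocation field may change.
 */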
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE  (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#endif

#if defined(AES_ASM) && !defined(I386_ONLY) && (  \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))
# endif
# ifdef BSAES_ASM
#  define BSAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))
# endif
/*
 * AES-NI section
 */
# define AESNI_CAPABLE  (OPENSSL_ia32cap_P[1] & (1 << (57 - 32)))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)  (gctx->ctr == aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash == gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block == (block128_f)aesni_encrypt && \
                              gctx->gcm.ghash == gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);

static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both.
             * We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
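/*-
 * Illustrative expansion (not part of the build): a hypothetical
 * invocation such as
 *
 *     BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)
 *
 * would emit two static EVP_CIPHER tables, aesni_128_cbc and aes_128_cbc,
 * plus a dispatcher
 *
 *     const EVP_CIPHER *EVP_aes_128_cbc(void)
 *     { return AESNI_CAPABLE ? &aesni_128_cbc : &aes_128_cbc; }
 *
 * so the AES-NI path is selected at runtime, once per cipher lookup.
 */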
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE          (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE      (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter means a code size increase comparable to
 * that of multiple key-length-specific subroutines, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    if (!iv && !key)
        return 1;

    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                   &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                               EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                        /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
/*
 * IBM S390X support
 */
# include "s390x_arch.h"

typedef struct {
    union {
        double align;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;
} S390X_AES_ECB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;

    int res;
} S390X_AES_OFB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;

    int res;
} S390X_AES_CFB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
} S390X_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;

/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen)  (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
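/*-
 * Worked example for S390X_AES_FC (illustrative only): for keylen 16,
 * ((16 << 3) - 128) >> 6 = (128 - 128) >> 6 = 0; for keylen 24,
 * (192 - 128) >> 6 = 1; for keylen 32, (256 - 128) >> 6 = 2.  Added to
 * S390X_AES_128 these offsets give the [18,19,20] mapping noted above.
 */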
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] &      \
                                S390X_CAPBIT(S390X_AES_256))

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE     1       /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE     1
# define S390X_aes_256_cbc_CAPABLE     1
# define S390X_AES_CBC_CTX             EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE     S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE     S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE     S390X_aes_256_CAPABLE

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}

# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmo[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->res = 0;
    return 1;
}

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    int n = cctx->res;
    int rem;

    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}
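/*-
 * Worked trace of the residue handling above (illustrative only): with
 * cctx->res == 0 and a 20-byte update, the leading while loop does nothing,
 * kmo processes the 16-byte multiple, then km generates one fresh keystream
 * block into the chaining value for the 4-byte tail, which is XORed byte by
 * byte; cctx->res ends up as 4.  A subsequent call first consumes keystream
 * bytes 4..15 before invoking kmo again.
 */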
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    cctx->res = 0;
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int n = cctx->res;
    int rem;
    unsigned char tmp;

    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}

# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] &        \
                                     S390X_CAPBIT(S390X_AES_256))

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;   /* 1 byte cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    return 1;
}

# define S390X_aes_128_cfb1_CAPABLE    0
# define S390X_aes_192_cfb1_CAPABLE    0
# define S390X_aes_256_cfb1_CAPABLE    0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE     1       /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE     1
# define S390X_aes_256_ctr_CAPABLE     1
# define S390X_AES_CTR_CTX             EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kma[0] &        \
                                     S390X_CAPBIT(S390X_AES_256)))

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
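/*-
 * Worked example for S390X_gcm_ivpadlen (illustrative only): an IV of
 * 13 bytes gives ((13 + 15) >> 4 << 4) + 16 = 16 + 16 = 32, i.e. the IV
 * rounded up to a whole 16-byte block plus one extra block that carries
 * the 64-bit bit-length field used by the GHASH-based J0 computation.
 */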
/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}

/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /*
             * previous call already encrypted/decrypted its remainder,
             * see comment below
             */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}

/*-
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}

/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_CTX_iv_length(c);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        if (arg != 12) {
            iv = EVP_CIPHER_CTX_iv_noconst(c);
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}

/*-
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
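/*-
 * Minimal sketch of how the EVP layer typically reaches the callbacks above
 * (illustrative only, error handling omitted; key/iv/aad/pt/ct/tag are
 * caller-supplied buffers):
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);    .. init_key above
 *     EVP_EncryptUpdate(c, NULL, &n, aad, aadlen);   .. AAD-only pass
 *     EVP_EncryptUpdate(c, ct, &n, pt, ptlen);       .. en/decrypt
 *     EVP_EncryptFinal_ex(c, ct + n, &n);            .. finalize
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */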
/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /*
         * recall that we already did en-/decrypt gctx->mres
         * and returned it to caller...
         */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_encrypting(ctx);
        if (enc) {
            gctx->taglen = 16;
        } else {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}

static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    if (gctx == NULL)
        return 0;

    iv = EVP_CIPHER_CTX_iv(c);
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}

# define S390X_AES_XTS_CTX             EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE     1       /* checked by callee */
# define S390X_aes_256_xts_CAPABLE     1

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE &&    \
                                    (OPENSSL_s390xcap_P.kmac[0] &       \
                                     S390X_CAPBIT(S390X_AES_256)))

# define S390X_CCM_AAD_FLAG    0x40

/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}

/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
*/ 1951 ptr = ctx->aes.ccm.buf.b; 1952 1953 if (alen < ((1 << 16) - (1 << 8))) { 1954 *(uint16_t *)ptr = alen; 1955 i = 2; 1956 } else if (sizeof(alen) == 8 1957 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) { 1958 *(uint16_t *)ptr = 0xffff; 1959 *(uint64_t *)(ptr + 2) = alen; 1960 i = 10; 1961 } else { 1962 *(uint16_t *)ptr = 0xfffe; 1963 *(uint32_t *)(ptr + 2) = alen; 1964 i = 6; 1965 } 1966 1967 while (i < 16 && alen) { 1968 ctx->aes.ccm.buf.b[i] = *aad; 1969 ++aad; 1970 --alen; 1971 ++i; 1972 } 1973 while (i < 16) { 1974 ctx->aes.ccm.buf.b[i] = 0; 1975 ++i; 1976 } 1977 1978 ctx->aes.ccm.kmac_param.icv.g[0] = 0; 1979 ctx->aes.ccm.kmac_param.icv.g[1] = 0; 1980 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc, 1981 &ctx->aes.ccm.kmac_param); 1982 ctx->aes.ccm.blocks += 2; 1983 1984 rem = alen & 0xf; 1985 alen &= ~(size_t)0xf; 1986 if (alen) { 1987 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 1988 ctx->aes.ccm.blocks += alen >> 4; 1989 aad += alen; 1990 } 1991 if (rem) { 1992 for (i = 0; i < rem; i++) 1993 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i]; 1994 1995 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 1996 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 1997 ctx->aes.ccm.kmac_param.k); 1998 ctx->aes.ccm.blocks++; 1999 } 2000 } 2001 2002 /*- 2003 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for 2004 * success. 2005 */ 2006 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in, 2007 unsigned char *out, size_t len, int enc) 2008 { 2009 size_t n, rem; 2010 unsigned int i, l, num; 2011 unsigned char flags; 2012 2013 flags = ctx->aes.ccm.nonce.b[0]; 2014 if (!(flags & S390X_CCM_AAD_FLAG)) { 2015 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b, 2016 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k); 2017 ctx->aes.ccm.blocks++; 2018 } 2019 l = flags & 0x7; 2020 ctx->aes.ccm.nonce.b[0] = l; 2021 2022 /*- 2023 * Reconstruct length from encoded length field 2024 * and initialize it with counter value. 
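* The low 3 bits of the flags byte encode the length-field size
* minus one, so the message length occupies the trailing
* (flags & 7) + 1 bytes of the block. It is read back into n,
* checked against len and then overwritten with the initial counter
* value 1 (counter 0 is reserved for encrypting the tag).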
2025 */ 2026 n = 0; 2027 for (i = 15 - l; i < 15; i++) { 2028 n |= ctx->aes.ccm.nonce.b[i]; 2029 ctx->aes.ccm.nonce.b[i] = 0; 2030 n <<= 8; 2031 } 2032 n |= ctx->aes.ccm.nonce.b[15]; 2033 ctx->aes.ccm.nonce.b[15] = 1; 2034 2035 if (n != len) 2036 return -1; /* length mismatch */ 2037 2038 if (enc) { 2039 /* Two operations per block plus one for tag encryption */ 2040 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1; 2041 if (ctx->aes.ccm.blocks > (1ULL << 61)) 2042 return -2; /* too much data */ 2043 } 2044 2045 num = 0; 2046 rem = len & 0xf; 2047 len &= ~(size_t)0xf; 2048 2049 if (enc) { 2050 /* mac-then-encrypt */ 2051 if (len) 2052 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2053 if (rem) { 2054 for (i = 0; i < rem; i++) 2055 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i]; 2056 2057 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2058 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2059 ctx->aes.ccm.kmac_param.k); 2060 } 2061 2062 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k, 2063 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b, 2064 &num, (ctr128_f)AES_ctr32_encrypt); 2065 } else { 2066 /* decrypt-then-mac */ 2067 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k, 2068 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b, 2069 &num, (ctr128_f)AES_ctr32_encrypt); 2070 2071 if (len) 2072 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param); 2073 if (rem) { 2074 for (i = 0; i < rem; i++) 2075 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i]; 2076 2077 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16, 2078 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc, 2079 ctx->aes.ccm.kmac_param.k); 2080 } 2081 } 2082 /* encrypt tag */ 2083 for (i = 15 - l; i < 16; i++) 2084 ctx->aes.ccm.nonce.b[i] = 0; 2085 2086 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc, 2087 ctx->aes.ccm.kmac_param.k); 2088 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0]; 2089 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1]; 2090 2091 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */ 2092 return 0; 2093 } 2094 2095 /*- 2096 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written 2097 * if successful. Otherwise -1 is returned. 2098 */ 2099 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2100 const unsigned char *in, size_t len) 2101 { 2102 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2103 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2104 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx); 2105 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 2106 2107 if (out != in 2108 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m)) 2109 return -1; 2110 2111 if (enc) { 2112 /* Set explicit iv (sequence number). */ 2113 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN); 2114 } 2115 2116 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m; 2117 /*- 2118 * Get explicit iv (sequence number). We already have fixed iv 2119 * (server/client_write_iv) here. 
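* A TLS 1.2 CCM record consists of the 8-byte explicit nonce (the
* sequence number), the ciphertext and an m-byte tag; the 4-byte
* fixed part of the nonce never travels on the wire.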
2120 */ 2121 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN); 2122 s390x_aes_ccm_setiv(cctx, ivec, len); 2123 2124 /* Process aad (sequence number|type|version|length) */ 2125 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len); 2126 2127 in += EVP_CCM_TLS_EXPLICIT_IV_LEN; 2128 out += EVP_CCM_TLS_EXPLICIT_IV_LEN; 2129 2130 if (enc) { 2131 if (s390x_aes_ccm(cctx, in, out, len, enc)) 2132 return -1; 2133 2134 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m); 2135 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m; 2136 } else { 2137 if (!s390x_aes_ccm(cctx, in, out, len, enc)) { 2138 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len, 2139 cctx->aes.ccm.m)) 2140 return len; 2141 } 2142 2143 OPENSSL_cleanse(out, len); 2144 return -1; 2145 } 2146 } 2147 2148 /*- 2149 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is 2150 * returned. 2151 */ 2152 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx, 2153 const unsigned char *key, 2154 const unsigned char *iv, int enc) 2155 { 2156 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2157 unsigned char *ivec; 2158 int keylen; 2159 2160 if (iv == NULL && key == NULL) 2161 return 1; 2162 2163 if (key != NULL) { 2164 keylen = EVP_CIPHER_CTX_key_length(ctx); 2165 cctx->aes.ccm.fc = S390X_AES_FC(keylen); 2166 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen); 2167 2168 /* Store encoded m and l. */ 2169 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7) 2170 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3; 2171 memset(cctx->aes.ccm.nonce.b + 1, 0, 2172 sizeof(cctx->aes.ccm.nonce.b)); 2173 cctx->aes.ccm.blocks = 0; 2174 2175 cctx->aes.ccm.key_set = 1; 2176 } 2177 2178 if (iv != NULL) { 2179 ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2180 memcpy(ivec, iv, 15 - cctx->aes.ccm.l); 2181 2182 cctx->aes.ccm.iv_set = 1; 2183 } 2184 2185 return 1; 2186 } 2187 2188 /*- 2189 * Called from EVP layer to initialize context, process additional 2190 * authenticated data, en/de-crypt plain/cipher-text and authenticate 2191 * plaintext or process a TLS packet, depending on context. Returns bytes 2192 * written on success. Otherwise -1 is returned. 2193 */ 2194 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2195 const unsigned char *in, size_t len) 2196 { 2197 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx); 2198 const int enc = EVP_CIPHER_CTX_encrypting(ctx); 2199 int rv; 2200 unsigned char *buf, *ivec; 2201 2202 if (!cctx->aes.ccm.key_set) 2203 return -1; 2204 2205 if (cctx->aes.ccm.tls_aad_len >= 0) 2206 return s390x_aes_ccm_tls_cipher(ctx, out, in, len); 2207 2208 /*- 2209 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt 2210 * so integrity must be checked already at Update() i.e., before 2211 * potentially corrupted data is output. 2212 */ 2213 if (in == NULL && out != NULL) 2214 return 0; 2215 2216 if (!cctx->aes.ccm.iv_set) 2217 return -1; 2218 2219 if (out == NULL) { 2220 /* Update(): Pass message length. */ 2221 if (in == NULL) { 2222 ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2223 s390x_aes_ccm_setiv(cctx, ivec, len); 2224 2225 cctx->aes.ccm.len_set = 1; 2226 return len; 2227 } 2228 2229 /* Update(): Process aad. */ 2230 if (!cctx->aes.ccm.len_set && len) 2231 return -1; 2232 2233 s390x_aes_ccm_aad(cctx, in, len); 2234 return len; 2235 } 2236 2237 /* The tag must be set before actually decrypting data */ 2238 if (!enc && !cctx->aes.ccm.tag_set) 2239 return -1; 2240 2241 /* Update(): Process message. 
*/ 2242 2243 if (!cctx->aes.ccm.len_set) { 2244 /*- 2245 * In case message length was not previously set explicitly via 2246 * Update(), set it now. 2247 */ 2248 ivec = EVP_CIPHER_CTX_iv_noconst(ctx); 2249 s390x_aes_ccm_setiv(cctx, ivec, len); 2250 2251 cctx->aes.ccm.len_set = 1; 2252 } 2253 2254 if (enc) { 2255 if (s390x_aes_ccm(cctx, in, out, len, enc)) 2256 return -1; 2257 2258 cctx->aes.ccm.tag_set = 1; 2259 return len; 2260 } else { 2261 rv = -1; 2262 2263 if (!s390x_aes_ccm(cctx, in, out, len, enc)) { 2264 buf = EVP_CIPHER_CTX_buf_noconst(ctx); 2265 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf, 2266 cctx->aes.ccm.m)) 2267 rv = len; 2268 } 2269 2270 if (rv == -1) 2271 OPENSSL_cleanse(out, len); 2272 2273 cctx->aes.ccm.iv_set = 0; 2274 cctx->aes.ccm.tag_set = 0; 2275 cctx->aes.ccm.len_set = 0; 2276 return rv; 2277 } 2278 } 2279 2280 /*- 2281 * Performs various operations on the context structure depending on control 2282 * type. Returns 1 for success, 0 for failure and -1 for unknown control type. 2283 * Code is big-endian. 2284 */ 2285 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 2286 { 2287 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c); 2288 unsigned char *buf, *iv; 2289 int enc, len; 2290 2291 switch (type) { 2292 case EVP_CTRL_INIT: 2293 cctx->aes.ccm.key_set = 0; 2294 cctx->aes.ccm.iv_set = 0; 2295 cctx->aes.ccm.l = 8; 2296 cctx->aes.ccm.m = 12; 2297 cctx->aes.ccm.tag_set = 0; 2298 cctx->aes.ccm.len_set = 0; 2299 cctx->aes.ccm.tls_aad_len = -1; 2300 return 1; 2301 2302 case EVP_CTRL_AEAD_TLS1_AAD: 2303 if (arg != EVP_AEAD_TLS1_AAD_LEN) 2304 return 0; 2305 2306 /* Save the aad for later use. */ 2307 buf = EVP_CIPHER_CTX_buf_noconst(c); 2308 memcpy(buf, ptr, arg); 2309 cctx->aes.ccm.tls_aad_len = arg; 2310 2311 len = buf[arg - 2] << 8 | buf[arg - 1]; 2312 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN) 2313 return 0; 2314 2315 /* Correct length for explicit iv. */ 2316 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN; 2317 2318 enc = EVP_CIPHER_CTX_encrypting(c); 2319 if (!enc) { 2320 if (len < cctx->aes.ccm.m) 2321 return 0; 2322 2323 /* Correct length for tag. */ 2324 len -= cctx->aes.ccm.m; 2325 } 2326 2327 buf[arg - 2] = len >> 8; 2328 buf[arg - 1] = len & 0xff; 2329 2330 /* Extra padding: tag appended to record. */ 2331 return cctx->aes.ccm.m; 2332 2333 case EVP_CTRL_CCM_SET_IV_FIXED: 2334 if (arg != EVP_CCM_TLS_FIXED_IV_LEN) 2335 return 0; 2336 2337 /* Copy to first part of the iv. 
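* This is the 4-byte salt from the TLS key block
* (client/server_write_IV); the remaining 8 bytes of the nonce are
* filled in per record.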
*/ 2338 iv = EVP_CIPHER_CTX_iv_noconst(c); 2339 memcpy(iv, ptr, arg); 2340 return 1; 2341 2342 case EVP_CTRL_AEAD_SET_IVLEN: 2343 arg = 15 - arg; 2344 /* fall-through */ 2345 2346 case EVP_CTRL_CCM_SET_L: 2347 if (arg < 2 || arg > 8) 2348 return 0; 2349 2350 cctx->aes.ccm.l = arg; 2351 return 1; 2352 2353 case EVP_CTRL_AEAD_SET_TAG: 2354 if ((arg & 1) || arg < 4 || arg > 16) 2355 return 0; 2356 2357 enc = EVP_CIPHER_CTX_encrypting(c); 2358 if (enc && ptr) 2359 return 0; 2360 2361 if (ptr) { 2362 cctx->aes.ccm.tag_set = 1; 2363 buf = EVP_CIPHER_CTX_buf_noconst(c); 2364 memcpy(buf, ptr, arg); 2365 } 2366 2367 cctx->aes.ccm.m = arg; 2368 return 1; 2369 2370 case EVP_CTRL_AEAD_GET_TAG: 2371 enc = EVP_CIPHER_CTX_encrypting(c); 2372 if (!enc || !cctx->aes.ccm.tag_set) 2373 return 0; 2374 2375 if(arg < cctx->aes.ccm.m) 2376 return 0; 2377 2378 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m); 2379 cctx->aes.ccm.tag_set = 0; 2380 cctx->aes.ccm.iv_set = 0; 2381 cctx->aes.ccm.len_set = 0; 2382 return 1; 2383 2384 case EVP_CTRL_COPY: 2385 return 1; 2386 2387 default: 2388 return -1; 2389 } 2390 } 2391 2392 # define s390x_aes_ccm_cleanup aes_ccm_cleanup 2393 2394 # ifndef OPENSSL_NO_OCB 2395 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX 2396 # define S390X_aes_128_ocb_CAPABLE 0 2397 # define S390X_aes_192_ocb_CAPABLE 0 2398 # define S390X_aes_256_ocb_CAPABLE 0 2399 2400 # define s390x_aes_ocb_init_key aes_ocb_init_key 2401 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2402 const unsigned char *iv, int enc); 2403 # define s390x_aes_ocb_cipher aes_ocb_cipher 2404 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2405 const unsigned char *in, size_t len); 2406 # define s390x_aes_ocb_cleanup aes_ocb_cleanup 2407 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *); 2408 # define s390x_aes_ocb_ctrl aes_ocb_ctrl 2409 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr); 2410 # endif 2411 2412 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \ 2413 MODE,flags) \ 2414 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \ 2415 nid##_##keylen##_##nmode,blocksize, \ 2416 keylen / 8, \ 2417 ivlen, \ 2418 flags | EVP_CIPH_##MODE##_MODE, \ 2419 s390x_aes_##mode##_init_key, \ 2420 s390x_aes_##mode##_cipher, \ 2421 NULL, \ 2422 sizeof(S390X_AES_##MODE##_CTX), \ 2423 NULL, \ 2424 NULL, \ 2425 NULL, \ 2426 NULL \ 2427 }; \ 2428 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2429 nid##_##keylen##_##nmode, \ 2430 blocksize, \ 2431 keylen / 8, \ 2432 ivlen, \ 2433 flags | EVP_CIPH_##MODE##_MODE, \ 2434 aes_init_key, \ 2435 aes_##mode##_cipher, \ 2436 NULL, \ 2437 sizeof(EVP_AES_KEY), \ 2438 NULL, \ 2439 NULL, \ 2440 NULL, \ 2441 NULL \ 2442 }; \ 2443 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2444 { \ 2445 return S390X_aes_##keylen##_##mode##_CAPABLE ? \ 2446 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \ 2447 } 2448 2449 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\ 2450 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \ 2451 nid##_##keylen##_##mode, \ 2452 blocksize, \ 2453 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 
2 : 1) * keylen / 8, \ 2454 ivlen, \ 2455 flags | EVP_CIPH_##MODE##_MODE, \ 2456 s390x_aes_##mode##_init_key, \ 2457 s390x_aes_##mode##_cipher, \ 2458 s390x_aes_##mode##_cleanup, \ 2459 sizeof(S390X_AES_##MODE##_CTX), \ 2460 NULL, \ 2461 NULL, \ 2462 s390x_aes_##mode##_ctrl, \ 2463 NULL \ 2464 }; \ 2465 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2466 nid##_##keylen##_##mode,blocksize, \ 2467 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \ 2468 ivlen, \ 2469 flags | EVP_CIPH_##MODE##_MODE, \ 2470 aes_##mode##_init_key, \ 2471 aes_##mode##_cipher, \ 2472 aes_##mode##_cleanup, \ 2473 sizeof(EVP_AES_##MODE##_CTX), \ 2474 NULL, \ 2475 NULL, \ 2476 aes_##mode##_ctrl, \ 2477 NULL \ 2478 }; \ 2479 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2480 { \ 2481 return S390X_aes_##keylen##_##mode##_CAPABLE ? \ 2482 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \ 2483 } 2484 2485 #else 2486 2487 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \ 2488 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2489 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \ 2490 flags|EVP_CIPH_##MODE##_MODE, \ 2491 aes_init_key, \ 2492 aes_##mode##_cipher, \ 2493 NULL, \ 2494 sizeof(EVP_AES_KEY), \ 2495 NULL,NULL,NULL,NULL }; \ 2496 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2497 { return &aes_##keylen##_##mode; } 2498 2499 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \ 2500 static const EVP_CIPHER aes_##keylen##_##mode = { \ 2501 nid##_##keylen##_##mode,blocksize, \ 2502 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \ 2503 flags|EVP_CIPH_##MODE##_MODE, \ 2504 aes_##mode##_init_key, \ 2505 aes_##mode##_cipher, \ 2506 aes_##mode##_cleanup, \ 2507 sizeof(EVP_AES_##MODE##_CTX), \ 2508 NULL,NULL,aes_##mode##_ctrl,NULL }; \ 2509 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \ 2510 { return &aes_##keylen##_##mode; } 2511 2512 #endif 2513 2514 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__)) 2515 # include "arm_arch.h" 2516 # if __ARM_MAX_ARCH__>=7 2517 # if defined(BSAES_ASM) 2518 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON) 2519 # endif 2520 # if defined(VPAES_ASM) 2521 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON) 2522 # endif 2523 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES) 2524 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key 2525 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key 2526 # define HWAES_encrypt aes_v8_encrypt 2527 # define HWAES_decrypt aes_v8_decrypt 2528 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt 2529 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks 2530 # endif 2531 #endif 2532 2533 #if defined(HWAES_CAPABLE) 2534 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits, 2535 AES_KEY *key); 2536 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits, 2537 AES_KEY *key); 2538 void HWAES_encrypt(const unsigned char *in, unsigned char *out, 2539 const AES_KEY *key); 2540 void HWAES_decrypt(const unsigned char *in, unsigned char *out, 2541 const AES_KEY *key); 2542 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out, 2543 size_t length, const AES_KEY *key, 2544 unsigned char *ivec, const int enc); 2545 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out, 2546 size_t len, const AES_KEY *key, 2547 const unsigned char ivec[16]); 2548 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out, 2549 
size_t len, const AES_KEY *key1, 2550 const AES_KEY *key2, const unsigned char iv[16]); 2551 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out, 2552 size_t len, const AES_KEY *key1, 2553 const AES_KEY *key2, const unsigned char iv[16]); 2554 #endif 2555 2556 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \ 2557 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2558 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2559 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2560 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \ 2561 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \ 2562 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \ 2563 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags) 2564 2565 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2566 const unsigned char *iv, int enc) 2567 { 2568 int ret, mode; 2569 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2570 2571 mode = EVP_CIPHER_CTX_mode(ctx); 2572 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) 2573 && !enc) { 2574 #ifdef HWAES_CAPABLE 2575 if (HWAES_CAPABLE) { 2576 ret = HWAES_set_decrypt_key(key, 2577 EVP_CIPHER_CTX_key_length(ctx) * 8, 2578 &dat->ks.ks); 2579 dat->block = (block128_f) HWAES_decrypt; 2580 dat->stream.cbc = NULL; 2581 # ifdef HWAES_cbc_encrypt 2582 if (mode == EVP_CIPH_CBC_MODE) 2583 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt; 2584 # endif 2585 } else 2586 #endif 2587 #ifdef BSAES_CAPABLE 2588 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) { 2589 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2590 &dat->ks.ks); 2591 dat->block = (block128_f) AES_decrypt; 2592 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt; 2593 } else 2594 #endif 2595 #ifdef VPAES_CAPABLE 2596 if (VPAES_CAPABLE) { 2597 ret = vpaes_set_decrypt_key(key, 2598 EVP_CIPHER_CTX_key_length(ctx) * 8, 2599 &dat->ks.ks); 2600 dat->block = (block128_f) vpaes_decrypt; 2601 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2602 (cbc128_f) vpaes_cbc_encrypt : NULL; 2603 } else 2604 #endif 2605 { 2606 ret = AES_set_decrypt_key(key, 2607 EVP_CIPHER_CTX_key_length(ctx) * 8, 2608 &dat->ks.ks); 2609 dat->block = (block128_f) AES_decrypt; 2610 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 
2611 (cbc128_f) AES_cbc_encrypt : NULL; 2612 } 2613 } else 2614 #ifdef HWAES_CAPABLE 2615 if (HWAES_CAPABLE) { 2616 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2617 &dat->ks.ks); 2618 dat->block = (block128_f) HWAES_encrypt; 2619 dat->stream.cbc = NULL; 2620 # ifdef HWAES_cbc_encrypt 2621 if (mode == EVP_CIPH_CBC_MODE) 2622 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt; 2623 else 2624 # endif 2625 # ifdef HWAES_ctr32_encrypt_blocks 2626 if (mode == EVP_CIPH_CTR_MODE) 2627 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; 2628 else 2629 # endif 2630 (void)0; /* terminate potentially open 'else' */ 2631 } else 2632 #endif 2633 #ifdef BSAES_CAPABLE 2634 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) { 2635 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2636 &dat->ks.ks); 2637 dat->block = (block128_f) AES_encrypt; 2638 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks; 2639 } else 2640 #endif 2641 #ifdef VPAES_CAPABLE 2642 if (VPAES_CAPABLE) { 2643 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2644 &dat->ks.ks); 2645 dat->block = (block128_f) vpaes_encrypt; 2646 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2647 (cbc128_f) vpaes_cbc_encrypt : NULL; 2648 } else 2649 #endif 2650 { 2651 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 2652 &dat->ks.ks); 2653 dat->block = (block128_f) AES_encrypt; 2654 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? 2655 (cbc128_f) AES_cbc_encrypt : NULL; 2656 #ifdef AES_CTR_ASM 2657 if (mode == EVP_CIPH_CTR_MODE) 2658 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt; 2659 #endif 2660 } 2661 2662 if (ret < 0) { 2663 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED); 2664 return 0; 2665 } 2666 2667 return 1; 2668 } 2669 2670 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2671 const unsigned char *in, size_t len) 2672 { 2673 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2674 2675 if (dat->stream.cbc) 2676 (*dat->stream.cbc) (in, out, len, &dat->ks, 2677 EVP_CIPHER_CTX_iv_noconst(ctx), 2678 EVP_CIPHER_CTX_encrypting(ctx)); 2679 else if (EVP_CIPHER_CTX_encrypting(ctx)) 2680 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, 2681 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block); 2682 else 2683 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, 2684 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block); 2685 2686 return 1; 2687 } 2688 2689 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2690 const unsigned char *in, size_t len) 2691 { 2692 size_t bl = EVP_CIPHER_CTX_block_size(ctx); 2693 size_t i; 2694 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2695 2696 if (len < bl) 2697 return 1; 2698 2699 for (i = 0, len -= bl; i <= len; i += bl) 2700 (*dat->block) (in + i, out + i, &dat->ks); 2701 2702 return 1; 2703 } 2704 2705 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2706 const unsigned char *in, size_t len) 2707 { 2708 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2709 2710 int num = EVP_CIPHER_CTX_num(ctx); 2711 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, 2712 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block); 2713 EVP_CIPHER_CTX_set_num(ctx, num); 2714 return 1; 2715 } 2716 2717 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2718 const unsigned char *in, size_t len) 2719 { 2720 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2721 2722 int num = EVP_CIPHER_CTX_num(ctx); 2723 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, 2724 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2725 EVP_CIPHER_CTX_encrypting(ctx), 
dat->block); 2726 EVP_CIPHER_CTX_set_num(ctx, num); 2727 return 1; 2728 } 2729 2730 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2731 const unsigned char *in, size_t len) 2732 { 2733 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2734 2735 int num = EVP_CIPHER_CTX_num(ctx); 2736 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, 2737 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2738 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2739 EVP_CIPHER_CTX_set_num(ctx, num); 2740 return 1; 2741 } 2742 2743 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2744 const unsigned char *in, size_t len) 2745 { 2746 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2747 2748 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) { 2749 int num = EVP_CIPHER_CTX_num(ctx); 2750 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, 2751 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2752 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2753 EVP_CIPHER_CTX_set_num(ctx, num); 2754 return 1; 2755 } 2756 2757 while (len >= MAXBITCHUNK) { 2758 int num = EVP_CIPHER_CTX_num(ctx); 2759 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks, 2760 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2761 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2762 EVP_CIPHER_CTX_set_num(ctx, num); 2763 len -= MAXBITCHUNK; 2764 out += MAXBITCHUNK; 2765 in += MAXBITCHUNK; 2766 } 2767 if (len) { 2768 int num = EVP_CIPHER_CTX_num(ctx); 2769 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks, 2770 EVP_CIPHER_CTX_iv_noconst(ctx), &num, 2771 EVP_CIPHER_CTX_encrypting(ctx), dat->block); 2772 EVP_CIPHER_CTX_set_num(ctx, num); 2773 } 2774 2775 return 1; 2776 } 2777 2778 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 2779 const unsigned char *in, size_t len) 2780 { 2781 unsigned int num = EVP_CIPHER_CTX_num(ctx); 2782 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx); 2783 2784 if (dat->stream.ctr) 2785 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, 2786 EVP_CIPHER_CTX_iv_noconst(ctx), 2787 EVP_CIPHER_CTX_buf_noconst(ctx), 2788 &num, dat->stream.ctr); 2789 else 2790 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, 2791 EVP_CIPHER_CTX_iv_noconst(ctx), 2792 EVP_CIPHER_CTX_buf_noconst(ctx), &num, 2793 dat->block); 2794 EVP_CIPHER_CTX_set_num(ctx, num); 2795 return 1; 2796 } 2797 2798 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0) 2799 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0) 2800 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0) 2801 2802 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c) 2803 { 2804 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); 2805 if (gctx == NULL) 2806 return 0; 2807 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm)); 2808 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c)) 2809 OPENSSL_free(gctx->iv); 2810 return 1; 2811 } 2812 2813 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 2814 { 2815 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); 2816 switch (type) { 2817 case EVP_CTRL_INIT: 2818 gctx->key_set = 0; 2819 gctx->iv_set = 0; 2820 gctx->ivlen = c->cipher->iv_len; 2821 gctx->iv = c->iv; 2822 gctx->taglen = -1; 2823 gctx->iv_gen = 0; 2824 gctx->tls_aad_len = -1; 2825 return 1; 2826 2827 case EVP_CTRL_AEAD_SET_IVLEN: 2828 if (arg <= 0) 2829 return 0; 2830 /* Allocate memory for IV if needed */ 2831 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) { 2832 if (gctx->iv != c->iv) 2833 OPENSSL_free(gctx->iv); 2834 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) { 2835 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 2836 return 0; 2837 } 2838 } 2839 gctx->ivlen = arg; 2840 return 
1; 2841 2842 case EVP_CTRL_AEAD_SET_TAG: 2843 if (arg <= 0 || arg > 16 || c->encrypt) 2844 return 0; 2845 memcpy(c->buf, ptr, arg); 2846 gctx->taglen = arg; 2847 return 1; 2848 2849 case EVP_CTRL_AEAD_GET_TAG: 2850 if (arg <= 0 || arg > 16 || !c->encrypt 2851 || gctx->taglen < 0) 2852 return 0; 2853 memcpy(ptr, c->buf, arg); 2854 return 1; 2855 2856 case EVP_CTRL_GCM_SET_IV_FIXED: 2857 /* Special case: -1 length restores whole IV */ 2858 if (arg == -1) { 2859 memcpy(gctx->iv, ptr, gctx->ivlen); 2860 gctx->iv_gen = 1; 2861 return 1; 2862 } 2863 /* 2864 * Fixed field must be at least 4 bytes and invocation field at least 2865 * 8. 2866 */ 2867 if ((arg < 4) || (gctx->ivlen - arg) < 8) 2868 return 0; 2869 if (arg) 2870 memcpy(gctx->iv, ptr, arg); 2871 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) 2872 return 0; 2873 gctx->iv_gen = 1; 2874 return 1; 2875 2876 case EVP_CTRL_GCM_IV_GEN: 2877 if (gctx->iv_gen == 0 || gctx->key_set == 0) 2878 return 0; 2879 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); 2880 if (arg <= 0 || arg > gctx->ivlen) 2881 arg = gctx->ivlen; 2882 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); 2883 /* 2884 * Invocation field will be at least 8 bytes in size and so no need 2885 * to check wrap around or increment more than last 8 bytes. 2886 */ 2887 ctr64_inc(gctx->iv + gctx->ivlen - 8); 2888 gctx->iv_set = 1; 2889 return 1; 2890 2891 case EVP_CTRL_GCM_SET_IV_INV: 2892 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) 2893 return 0; 2894 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); 2895 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); 2896 gctx->iv_set = 1; 2897 return 1; 2898 2899 case EVP_CTRL_AEAD_TLS1_AAD: 2900 /* Save the AAD for later use */ 2901 if (arg != EVP_AEAD_TLS1_AAD_LEN) 2902 return 0; 2903 memcpy(c->buf, ptr, arg); 2904 gctx->tls_aad_len = arg; 2905 { 2906 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1]; 2907 /* Correct length for explicit IV */ 2908 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN) 2909 return 0; 2910 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN; 2911 /* If decrypting correct for tag too */ 2912 if (!c->encrypt) { 2913 if (len < EVP_GCM_TLS_TAG_LEN) 2914 return 0; 2915 len -= EVP_GCM_TLS_TAG_LEN; 2916 } 2917 c->buf[arg - 2] = len >> 8; 2918 c->buf[arg - 1] = len & 0xff; 2919 } 2920 /* Extra padding: tag appended to record */ 2921 return EVP_GCM_TLS_TAG_LEN; 2922 2923 case EVP_CTRL_COPY: 2924 { 2925 EVP_CIPHER_CTX *out = ptr; 2926 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out); 2927 if (gctx->gcm.key) { 2928 if (gctx->gcm.key != &gctx->ks) 2929 return 0; 2930 gctx_out->gcm.key = &gctx_out->ks; 2931 } 2932 if (gctx->iv == c->iv) 2933 gctx_out->iv = out->iv; 2934 else { 2935 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) { 2936 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE); 2937 return 0; 2938 } 2939 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen); 2940 } 2941 return 1; 2942 } 2943 2944 default: 2945 return -1; 2946 2947 } 2948 } 2949 2950 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 2951 const unsigned char *iv, int enc) 2952 { 2953 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 2954 if (!iv && !key) 2955 return 1; 2956 if (key) { 2957 do { 2958 #ifdef HWAES_CAPABLE 2959 if (HWAES_CAPABLE) { 2960 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 2961 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 2962 (block128_f) HWAES_encrypt); 2963 # ifdef HWAES_ctr32_encrypt_blocks 2964 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; 2965 # else 
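/*
 * No dedicated ctr32 routine: gctx->ctr stays NULL and the GCM code
 * below falls back to CRYPTO_gcm128_encrypt() driving the
 * single-block function.
 */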
2966 gctx->ctr = NULL; 2967 # endif 2968 break; 2969 } else 2970 #endif 2971 #ifdef BSAES_CAPABLE 2972 if (BSAES_CAPABLE) { 2973 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 2974 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 2975 (block128_f) AES_encrypt); 2976 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks; 2977 break; 2978 } else 2979 #endif 2980 #ifdef VPAES_CAPABLE 2981 if (VPAES_CAPABLE) { 2982 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 2983 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 2984 (block128_f) vpaes_encrypt); 2985 gctx->ctr = NULL; 2986 break; 2987 } else 2988 #endif 2989 (void)0; /* terminate potentially open 'else' */ 2990 2991 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks); 2992 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, 2993 (block128_f) AES_encrypt); 2994 #ifdef AES_CTR_ASM 2995 gctx->ctr = (ctr128_f) AES_ctr32_encrypt; 2996 #else 2997 gctx->ctr = NULL; 2998 #endif 2999 } while (0); 3000 3001 /* 3002 * If we have an iv we can set it directly, otherwise use saved IV. 3003 */ 3004 if (iv == NULL && gctx->iv_set) 3005 iv = gctx->iv; 3006 if (iv) { 3007 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 3008 gctx->iv_set = 1; 3009 } 3010 gctx->key_set = 1; 3011 } else { 3012 /* If the key is set use the IV now, otherwise save a copy for later */ 3013 if (gctx->key_set) 3014 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); 3015 else 3016 memcpy(gctx->iv, iv, gctx->ivlen); 3017 gctx->iv_set = 1; 3018 gctx->iv_gen = 0; 3019 } 3020 return 1; 3021 } 3022 3023 /* 3024 * Handle TLS GCM packet format. This consists of the last portion of the IV 3025 * followed by the payload and finally the tag. On encrypt generate IV, 3026 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload 3027 * and verify tag. 3028 */ 3029 3030 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3031 const unsigned char *in, size_t len) 3032 { 3033 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3034 int rv = -1; 3035 /* Encrypt/decrypt must be performed in place */ 3036 if (out != in 3037 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN)) 3038 return -1; 3039 /* 3040 * Set IV from start of buffer or generate IV and write to start of 3041 * buffer. 3042 */ 3043 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
EVP_CTRL_GCM_IV_GEN 3044 : EVP_CTRL_GCM_SET_IV_INV, 3045 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0) 3046 goto err; 3047 /* Use saved AAD */ 3048 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len)) 3049 goto err; 3050 /* Fix buffer and length to point to payload */ 3051 in += EVP_GCM_TLS_EXPLICIT_IV_LEN; 3052 out += EVP_GCM_TLS_EXPLICIT_IV_LEN; 3053 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 3054 if (ctx->encrypt) { 3055 /* Encrypt payload */ 3056 if (gctx->ctr) { 3057 size_t bulk = 0; 3058 #if defined(AES_GCM_ASM) 3059 if (len >= 32 && AES_GCM_ASM(gctx)) { 3060 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) 3061 return -1; 3062 3063 bulk = AES_gcm_encrypt(in, out, len, 3064 gctx->gcm.key, 3065 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3066 gctx->gcm.len.u[1] += bulk; 3067 } 3068 #endif 3069 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, 3070 in + bulk, 3071 out + bulk, 3072 len - bulk, gctx->ctr)) 3073 goto err; 3074 } else { 3075 size_t bulk = 0; 3076 #if defined(AES_GCM_ASM2) 3077 if (len >= 32 && AES_GCM_ASM2(gctx)) { 3078 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) 3079 return -1; 3080 3081 bulk = AES_gcm_encrypt(in, out, len, 3082 gctx->gcm.key, 3083 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3084 gctx->gcm.len.u[1] += bulk; 3085 } 3086 #endif 3087 if (CRYPTO_gcm128_encrypt(&gctx->gcm, 3088 in + bulk, out + bulk, len - bulk)) 3089 goto err; 3090 } 3091 out += len; 3092 /* Finally write tag */ 3093 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN); 3094 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN; 3095 } else { 3096 /* Decrypt */ 3097 if (gctx->ctr) { 3098 size_t bulk = 0; 3099 #if defined(AES_GCM_ASM) 3100 if (len >= 16 && AES_GCM_ASM(gctx)) { 3101 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) 3102 return -1; 3103 3104 bulk = AES_gcm_decrypt(in, out, len, 3105 gctx->gcm.key, 3106 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3107 gctx->gcm.len.u[1] += bulk; 3108 } 3109 #endif 3110 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, 3111 in + bulk, 3112 out + bulk, 3113 len - bulk, gctx->ctr)) 3114 goto err; 3115 } else { 3116 size_t bulk = 0; 3117 #if defined(AES_GCM_ASM2) 3118 if (len >= 16 && AES_GCM_ASM2(gctx)) { 3119 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) 3120 return -1; 3121 3122 bulk = AES_gcm_decrypt(in, out, len, 3123 gctx->gcm.key, 3124 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3125 gctx->gcm.len.u[1] += bulk; 3126 } 3127 #endif 3128 if (CRYPTO_gcm128_decrypt(&gctx->gcm, 3129 in + bulk, out + bulk, len - bulk)) 3130 goto err; 3131 } 3132 /* Retrieve tag */ 3133 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN); 3134 /* If tag mismatch wipe buffer */ 3135 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) { 3136 OPENSSL_cleanse(out, len); 3137 goto err; 3138 } 3139 rv = len; 3140 } 3141 3142 err: 3143 gctx->iv_set = 0; 3144 gctx->tls_aad_len = -1; 3145 return rv; 3146 } 3147 3148 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3149 const unsigned char *in, size_t len) 3150 { 3151 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); 3152 /* If not set up, return error */ 3153 if (!gctx->key_set) 3154 return -1; 3155 3156 if (gctx->tls_aad_len >= 0) 3157 return aes_gcm_tls_cipher(ctx, out, in, len); 3158 3159 if (!gctx->iv_set) 3160 return -1; 3161 if (in) { 3162 if (out == NULL) { 3163 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len)) 3164 return -1; 3165 } else if (ctx->encrypt) { 3166 if (gctx->ctr) { 3167 size_t bulk = 0; 3168 #if defined(AES_GCM_ASM) 3169 if (len >= 32 && AES_GCM_ASM(gctx)) { 3170 
size_t res = (16 - gctx->gcm.mres) % 16; 3171 3172 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) 3173 return -1; 3174 3175 bulk = AES_gcm_encrypt(in + res, 3176 out + res, len - res, 3177 gctx->gcm.key, gctx->gcm.Yi.c, 3178 gctx->gcm.Xi.u); 3179 gctx->gcm.len.u[1] += bulk; 3180 bulk += res; 3181 } 3182 #endif 3183 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, 3184 in + bulk, 3185 out + bulk, 3186 len - bulk, gctx->ctr)) 3187 return -1; 3188 } else { 3189 size_t bulk = 0; 3190 #if defined(AES_GCM_ASM2) 3191 if (len >= 32 && AES_GCM_ASM2(gctx)) { 3192 size_t res = (16 - gctx->gcm.mres) % 16; 3193 3194 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) 3195 return -1; 3196 3197 bulk = AES_gcm_encrypt(in + res, 3198 out + res, len - res, 3199 gctx->gcm.key, gctx->gcm.Yi.c, 3200 gctx->gcm.Xi.u); 3201 gctx->gcm.len.u[1] += bulk; 3202 bulk += res; 3203 } 3204 #endif 3205 if (CRYPTO_gcm128_encrypt(&gctx->gcm, 3206 in + bulk, out + bulk, len - bulk)) 3207 return -1; 3208 } 3209 } else { 3210 if (gctx->ctr) { 3211 size_t bulk = 0; 3212 #if defined(AES_GCM_ASM) 3213 if (len >= 16 && AES_GCM_ASM(gctx)) { 3214 size_t res = (16 - gctx->gcm.mres) % 16; 3215 3216 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) 3217 return -1; 3218 3219 bulk = AES_gcm_decrypt(in + res, 3220 out + res, len - res, 3221 gctx->gcm.key, 3222 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3223 gctx->gcm.len.u[1] += bulk; 3224 bulk += res; 3225 } 3226 #endif 3227 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, 3228 in + bulk, 3229 out + bulk, 3230 len - bulk, gctx->ctr)) 3231 return -1; 3232 } else { 3233 size_t bulk = 0; 3234 #if defined(AES_GCM_ASM2) 3235 if (len >= 16 && AES_GCM_ASM2(gctx)) { 3236 size_t res = (16 - gctx->gcm.mres) % 16; 3237 3238 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) 3239 return -1; 3240 3241 bulk = AES_gcm_decrypt(in + res, 3242 out + res, len - res, 3243 gctx->gcm.key, 3244 gctx->gcm.Yi.c, gctx->gcm.Xi.u); 3245 gctx->gcm.len.u[1] += bulk; 3246 bulk += res; 3247 } 3248 #endif 3249 if (CRYPTO_gcm128_decrypt(&gctx->gcm, 3250 in + bulk, out + bulk, len - bulk)) 3251 return -1; 3252 } 3253 } 3254 return len; 3255 } else { 3256 if (!ctx->encrypt) { 3257 if (gctx->taglen < 0) 3258 return -1; 3259 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0) 3260 return -1; 3261 gctx->iv_set = 0; 3262 return 0; 3263 } 3264 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16); 3265 gctx->taglen = 16; 3266 /* Don't reuse the IV */ 3267 gctx->iv_set = 0; 3268 return 0; 3269 } 3270 3271 } 3272 3273 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \ 3274 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \ 3275 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \ 3276 | EVP_CIPH_CUSTOM_COPY) 3277 3278 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, 3279 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3280 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM, 3281 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3282 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM, 3283 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3284 3285 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3286 { 3287 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c); 3288 if (type == EVP_CTRL_COPY) { 3289 EVP_CIPHER_CTX *out = ptr; 3290 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out); 3291 if (xctx->xts.key1) { 3292 if (xctx->xts.key1 != &xctx->ks1) 3293 return 0; 3294 xctx_out->xts.key1 = &xctx_out->ks1; 3295 } 3296 if (xctx->xts.key2) { 3297 if (xctx->xts.key2 != &xctx->ks2) 3298 return 0; 3299 xctx_out->xts.key2 = 
&xctx_out->ks2; 3300 } 3301 return 1; 3302 } else if (type != EVP_CTRL_INIT) 3303 return -1; 3304 /* key1 and key2 are used as an indicator both key and IV are set */ 3305 xctx->xts.key1 = NULL; 3306 xctx->xts.key2 = NULL; 3307 return 1; 3308 } 3309 3310 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3311 const unsigned char *iv, int enc) 3312 { 3313 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 3314 if (!iv && !key) 3315 return 1; 3316 3317 if (key) 3318 do { 3319 #ifdef AES_XTS_ASM 3320 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt; 3321 #else 3322 xctx->stream = NULL; 3323 #endif 3324 /* key_len is two AES keys */ 3325 #ifdef HWAES_CAPABLE 3326 if (HWAES_CAPABLE) { 3327 if (enc) { 3328 HWAES_set_encrypt_key(key, 3329 EVP_CIPHER_CTX_key_length(ctx) * 4, 3330 &xctx->ks1.ks); 3331 xctx->xts.block1 = (block128_f) HWAES_encrypt; 3332 # ifdef HWAES_xts_encrypt 3333 xctx->stream = HWAES_xts_encrypt; 3334 # endif 3335 } else { 3336 HWAES_set_decrypt_key(key, 3337 EVP_CIPHER_CTX_key_length(ctx) * 4, 3338 &xctx->ks1.ks); 3339 xctx->xts.block1 = (block128_f) HWAES_decrypt; 3340 # ifdef HWAES_xts_decrypt 3341 xctx->stream = HWAES_xts_decrypt; 3342 #endif 3343 } 3344 3345 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3346 EVP_CIPHER_CTX_key_length(ctx) * 4, 3347 &xctx->ks2.ks); 3348 xctx->xts.block2 = (block128_f) HWAES_encrypt; 3349 3350 xctx->xts.key1 = &xctx->ks1; 3351 break; 3352 } else 3353 #endif 3354 #ifdef BSAES_CAPABLE 3355 if (BSAES_CAPABLE) 3356 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt; 3357 else 3358 #endif 3359 #ifdef VPAES_CAPABLE 3360 if (VPAES_CAPABLE) { 3361 if (enc) { 3362 vpaes_set_encrypt_key(key, 3363 EVP_CIPHER_CTX_key_length(ctx) * 4, 3364 &xctx->ks1.ks); 3365 xctx->xts.block1 = (block128_f) vpaes_encrypt; 3366 } else { 3367 vpaes_set_decrypt_key(key, 3368 EVP_CIPHER_CTX_key_length(ctx) * 4, 3369 &xctx->ks1.ks); 3370 xctx->xts.block1 = (block128_f) vpaes_decrypt; 3371 } 3372 3373 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3374 EVP_CIPHER_CTX_key_length(ctx) * 4, 3375 &xctx->ks2.ks); 3376 xctx->xts.block2 = (block128_f) vpaes_encrypt; 3377 3378 xctx->xts.key1 = &xctx->ks1; 3379 break; 3380 } else 3381 #endif 3382 (void)0; /* terminate potentially open 'else' */ 3383 3384 if (enc) { 3385 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 3386 &xctx->ks1.ks); 3387 xctx->xts.block1 = (block128_f) AES_encrypt; 3388 } else { 3389 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4, 3390 &xctx->ks1.ks); 3391 xctx->xts.block1 = (block128_f) AES_decrypt; 3392 } 3393 3394 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2, 3395 EVP_CIPHER_CTX_key_length(ctx) * 4, 3396 &xctx->ks2.ks); 3397 xctx->xts.block2 = (block128_f) AES_encrypt; 3398 3399 xctx->xts.key1 = &xctx->ks1; 3400 } while (0); 3401 3402 if (iv) { 3403 xctx->xts.key2 = &xctx->ks2; 3404 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16); 3405 } 3406 3407 return 1; 3408 } 3409 3410 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3411 const unsigned char *in, size_t len) 3412 { 3413 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx); 3414 if (!xctx->xts.key1 || !xctx->xts.key2) 3415 return 0; 3416 if (!out || !in || len < AES_BLOCK_SIZE) 3417 return 0; 3418 if (xctx->stream) 3419 (*xctx->stream) (in, out, len, 3420 xctx->xts.key1, xctx->xts.key2, 3421 EVP_CIPHER_CTX_iv_noconst(ctx)); 3422 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx), 3423 in, out, 
len, 3424 EVP_CIPHER_CTX_encrypting(ctx))) 3425 return 0; 3426 return 1; 3427 } 3428 3429 #define aes_xts_cleanup NULL 3430 3431 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \ 3432 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \ 3433 | EVP_CIPH_CUSTOM_COPY) 3434 3435 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS) 3436 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS) 3437 3438 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3439 { 3440 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c); 3441 switch (type) { 3442 case EVP_CTRL_INIT: 3443 cctx->key_set = 0; 3444 cctx->iv_set = 0; 3445 cctx->L = 8; 3446 cctx->M = 12; 3447 cctx->tag_set = 0; 3448 cctx->len_set = 0; 3449 cctx->tls_aad_len = -1; 3450 return 1; 3451 3452 case EVP_CTRL_AEAD_TLS1_AAD: 3453 /* Save the AAD for later use */ 3454 if (arg != EVP_AEAD_TLS1_AAD_LEN) 3455 return 0; 3456 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg); 3457 cctx->tls_aad_len = arg; 3458 { 3459 uint16_t len = 3460 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8 3461 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1]; 3462 /* Correct length for explicit IV */ 3463 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN) 3464 return 0; 3465 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN; 3466 /* If decrypting correct for tag too */ 3467 if (!EVP_CIPHER_CTX_encrypting(c)) { 3468 if (len < cctx->M) 3469 return 0; 3470 len -= cctx->M; 3471 } 3472 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8; 3473 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff; 3474 } 3475 /* Extra padding: tag appended to record */ 3476 return cctx->M; 3477 3478 case EVP_CTRL_CCM_SET_IV_FIXED: 3479 /* Sanity check length */ 3480 if (arg != EVP_CCM_TLS_FIXED_IV_LEN) 3481 return 0; 3482 /* Just copy to first part of IV */ 3483 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg); 3484 return 1; 3485 3486 case EVP_CTRL_AEAD_SET_IVLEN: 3487 arg = 15 - arg; 3488 /* fall thru */ 3489 case EVP_CTRL_CCM_SET_L: 3490 if (arg < 2 || arg > 8) 3491 return 0; 3492 cctx->L = arg; 3493 return 1; 3494 3495 case EVP_CTRL_AEAD_SET_TAG: 3496 if ((arg & 1) || arg < 4 || arg > 16) 3497 return 0; 3498 if (EVP_CIPHER_CTX_encrypting(c) && ptr) 3499 return 0; 3500 if (ptr) { 3501 cctx->tag_set = 1; 3502 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg); 3503 } 3504 cctx->M = arg; 3505 return 1; 3506 3507 case EVP_CTRL_AEAD_GET_TAG: 3508 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set) 3509 return 0; 3510 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg)) 3511 return 0; 3512 cctx->tag_set = 0; 3513 cctx->iv_set = 0; 3514 cctx->len_set = 0; 3515 return 1; 3516 3517 case EVP_CTRL_COPY: 3518 { 3519 EVP_CIPHER_CTX *out = ptr; 3520 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out); 3521 if (cctx->ccm.key) { 3522 if (cctx->ccm.key != &cctx->ks) 3523 return 0; 3524 cctx_out->ccm.key = &cctx_out->ks; 3525 } 3526 return 1; 3527 } 3528 3529 default: 3530 return -1; 3531 3532 } 3533 } 3534 3535 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3536 const unsigned char *iv, int enc) 3537 { 3538 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3539 if (!iv && !key) 3540 return 1; 3541 if (key) 3542 do { 3543 #ifdef HWAES_CAPABLE 3544 if (HWAES_CAPABLE) { 3545 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3546 &cctx->ks.ks); 3547 3548 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3549 &cctx->ks, (block128_f) HWAES_encrypt); 3550 cctx->str = NULL; 3551 cctx->key_set = 1; 3552 break; 3553 } else 3554 #endif 3555 #ifdef 
VPAES_CAPABLE 3556 if (VPAES_CAPABLE) { 3557 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3558 &cctx->ks.ks); 3559 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3560 &cctx->ks, (block128_f) vpaes_encrypt); 3561 cctx->str = NULL; 3562 cctx->key_set = 1; 3563 break; 3564 } 3565 #endif 3566 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3567 &cctx->ks.ks); 3568 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L, 3569 &cctx->ks, (block128_f) AES_encrypt); 3570 cctx->str = NULL; 3571 cctx->key_set = 1; 3572 } while (0); 3573 if (iv) { 3574 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L); 3575 cctx->iv_set = 1; 3576 } 3577 return 1; 3578 } 3579 3580 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3581 const unsigned char *in, size_t len) 3582 { 3583 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3584 CCM128_CONTEXT *ccm = &cctx->ccm; 3585 /* Encrypt/decrypt must be performed in place */ 3586 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M)) 3587 return -1; 3588 /* If encrypting set explicit IV from sequence number (start of AAD) */ 3589 if (EVP_CIPHER_CTX_encrypting(ctx)) 3590 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx), 3591 EVP_CCM_TLS_EXPLICIT_IV_LEN); 3592 /* Get rest of IV from explicit IV */ 3593 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in, 3594 EVP_CCM_TLS_EXPLICIT_IV_LEN); 3595 /* Correct length value */ 3596 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M; 3597 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L, 3598 len)) 3599 return -1; 3600 /* Use saved AAD */ 3601 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len); 3602 /* Fix buffer to point to payload */ 3603 in += EVP_CCM_TLS_EXPLICIT_IV_LEN; 3604 out += EVP_CCM_TLS_EXPLICIT_IV_LEN; 3605 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3606 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, 3607 cctx->str) : 3608 CRYPTO_ccm128_encrypt(ccm, in, out, len)) 3609 return -1; 3610 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M)) 3611 return -1; 3612 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M; 3613 } else { 3614 if (cctx->str ? 
!CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, 3615 cctx->str) : 3616 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) { 3617 unsigned char tag[16]; 3618 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) { 3619 if (!CRYPTO_memcmp(tag, in + len, cctx->M)) 3620 return len; 3621 } 3622 } 3623 OPENSSL_cleanse(out, len); 3624 return -1; 3625 } 3626 } 3627 3628 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3629 const unsigned char *in, size_t len) 3630 { 3631 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx); 3632 CCM128_CONTEXT *ccm = &cctx->ccm; 3633 /* If not set up, return error */ 3634 if (!cctx->key_set) 3635 return -1; 3636 3637 if (cctx->tls_aad_len >= 0) 3638 return aes_ccm_tls_cipher(ctx, out, in, len); 3639 3640 /* EVP_*Final() doesn't return any data */ 3641 if (in == NULL && out != NULL) 3642 return 0; 3643 3644 if (!cctx->iv_set) 3645 return -1; 3646 3647 if (!out) { 3648 if (!in) { 3649 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 3650 15 - cctx->L, len)) 3651 return -1; 3652 cctx->len_set = 1; 3653 return len; 3654 } 3655 /* If have AAD need message length */ 3656 if (!cctx->len_set && len) 3657 return -1; 3658 CRYPTO_ccm128_aad(ccm, in, len); 3659 return len; 3660 } 3661 3662 /* The tag must be set before actually decrypting data */ 3663 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set) 3664 return -1; 3665 3666 /* If not set length yet do it */ 3667 if (!cctx->len_set) { 3668 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 3669 15 - cctx->L, len)) 3670 return -1; 3671 cctx->len_set = 1; 3672 } 3673 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3674 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, 3675 cctx->str) : 3676 CRYPTO_ccm128_encrypt(ccm, in, out, len)) 3677 return -1; 3678 cctx->tag_set = 1; 3679 return len; 3680 } else { 3681 int rv = -1; 3682 if (cctx->str ? 
!CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, 3683 cctx->str) : 3684 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) { 3685 unsigned char tag[16]; 3686 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) { 3687 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx), 3688 cctx->M)) 3689 rv = len; 3690 } 3691 } 3692 if (rv == -1) 3693 OPENSSL_cleanse(out, len); 3694 cctx->iv_set = 0; 3695 cctx->tag_set = 0; 3696 cctx->len_set = 0; 3697 return rv; 3698 } 3699 } 3700 3701 #define aes_ccm_cleanup NULL 3702 3703 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM, 3704 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3705 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM, 3706 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3707 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM, 3708 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS) 3709 3710 typedef struct { 3711 union { 3712 double align; 3713 AES_KEY ks; 3714 } ks; 3715 /* Indicates if IV has been set */ 3716 unsigned char *iv; 3717 } EVP_AES_WRAP_CTX; 3718 3719 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key, 3720 const unsigned char *iv, int enc) 3721 { 3722 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx); 3723 if (!iv && !key) 3724 return 1; 3725 if (key) { 3726 if (EVP_CIPHER_CTX_encrypting(ctx)) 3727 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3728 &wctx->ks.ks); 3729 else 3730 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8, 3731 &wctx->ks.ks); 3732 if (!iv) 3733 wctx->iv = NULL; 3734 } 3735 if (iv) { 3736 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx)); 3737 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx); 3738 } 3739 return 1; 3740 } 3741 3742 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out, 3743 const unsigned char *in, size_t inlen) 3744 { 3745 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx); 3746 size_t rv; 3747 /* AES wrap with padding has IV length of 4, without padding 8 */ 3748 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4; 3749 /* No final operation so always return zero length */ 3750 if (!in) 3751 return 0; 3752 /* Input length must always be non-zero */ 3753 if (!inlen) 3754 return -1; 3755 /* If decrypting need at least 16 bytes and multiple of 8 */ 3756 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7)) 3757 return -1; 3758 /* If not padding input must be multiple of 8 */ 3759 if (!pad && inlen & 0x7) 3760 return -1; 3761 if (is_partially_overlapping(out, in, inlen)) { 3762 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING); 3763 return 0; 3764 } 3765 if (!out) { 3766 if (EVP_CIPHER_CTX_encrypting(ctx)) { 3767 /* If padding round up to multiple of 8 */ 3768 if (pad) 3769 inlen = (inlen + 7) / 8 * 8; 3770 /* 8 byte prefix */ 3771 return inlen + 8; 3772 } else { 3773 /* 3774 * If not padding output will be exactly 8 bytes smaller than 3775 * input. If padding it will be at least 8 bytes smaller but we 3776 * don't know how much. 
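* (RFC 5649 unwrap strips the 8-byte prefix plus up to 7 padding
* bytes, so inlen - 8 is an upper bound the caller can use for
* sizing the output buffer.)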
3777 */ 3778 return inlen - 8; 3779 } 3780 } 3781 if (pad) { 3782 if (EVP_CIPHER_CTX_encrypting(ctx)) 3783 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv, 3784 out, in, inlen, 3785 (block128_f) AES_encrypt); 3786 else 3787 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv, 3788 out, in, inlen, 3789 (block128_f) AES_decrypt); 3790 } else { 3791 if (EVP_CIPHER_CTX_encrypting(ctx)) 3792 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv, 3793 out, in, inlen, (block128_f) AES_encrypt); 3794 else 3795 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv, 3796 out, in, inlen, (block128_f) AES_decrypt); 3797 } 3798 return rv ? (int)rv : -1; 3799 } 3800 3801 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \ 3802 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \ 3803 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1) 3804 3805 static const EVP_CIPHER aes_128_wrap = { 3806 NID_id_aes128_wrap, 3807 8, 16, 8, WRAP_FLAGS, 3808 aes_wrap_init_key, aes_wrap_cipher, 3809 NULL, 3810 sizeof(EVP_AES_WRAP_CTX), 3811 NULL, NULL, NULL, NULL 3812 }; 3813 3814 const EVP_CIPHER *EVP_aes_128_wrap(void) 3815 { 3816 return &aes_128_wrap; 3817 } 3818 3819 static const EVP_CIPHER aes_192_wrap = { 3820 NID_id_aes192_wrap, 3821 8, 24, 8, WRAP_FLAGS, 3822 aes_wrap_init_key, aes_wrap_cipher, 3823 NULL, 3824 sizeof(EVP_AES_WRAP_CTX), 3825 NULL, NULL, NULL, NULL 3826 }; 3827 3828 const EVP_CIPHER *EVP_aes_192_wrap(void) 3829 { 3830 return &aes_192_wrap; 3831 } 3832 3833 static const EVP_CIPHER aes_256_wrap = { 3834 NID_id_aes256_wrap, 3835 8, 32, 8, WRAP_FLAGS, 3836 aes_wrap_init_key, aes_wrap_cipher, 3837 NULL, 3838 sizeof(EVP_AES_WRAP_CTX), 3839 NULL, NULL, NULL, NULL 3840 }; 3841 3842 const EVP_CIPHER *EVP_aes_256_wrap(void) 3843 { 3844 return &aes_256_wrap; 3845 } 3846 3847 static const EVP_CIPHER aes_128_wrap_pad = { 3848 NID_id_aes128_wrap_pad, 3849 8, 16, 4, WRAP_FLAGS, 3850 aes_wrap_init_key, aes_wrap_cipher, 3851 NULL, 3852 sizeof(EVP_AES_WRAP_CTX), 3853 NULL, NULL, NULL, NULL 3854 }; 3855 3856 const EVP_CIPHER *EVP_aes_128_wrap_pad(void) 3857 { 3858 return &aes_128_wrap_pad; 3859 } 3860 3861 static const EVP_CIPHER aes_192_wrap_pad = { 3862 NID_id_aes192_wrap_pad, 3863 8, 24, 4, WRAP_FLAGS, 3864 aes_wrap_init_key, aes_wrap_cipher, 3865 NULL, 3866 sizeof(EVP_AES_WRAP_CTX), 3867 NULL, NULL, NULL, NULL 3868 }; 3869 3870 const EVP_CIPHER *EVP_aes_192_wrap_pad(void) 3871 { 3872 return &aes_192_wrap_pad; 3873 } 3874 3875 static const EVP_CIPHER aes_256_wrap_pad = { 3876 NID_id_aes256_wrap_pad, 3877 8, 32, 4, WRAP_FLAGS, 3878 aes_wrap_init_key, aes_wrap_cipher, 3879 NULL, 3880 sizeof(EVP_AES_WRAP_CTX), 3881 NULL, NULL, NULL, NULL 3882 }; 3883 3884 const EVP_CIPHER *EVP_aes_256_wrap_pad(void) 3885 { 3886 return &aes_256_wrap_pad; 3887 } 3888 3889 #ifndef OPENSSL_NO_OCB 3890 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) 3891 { 3892 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c); 3893 EVP_CIPHER_CTX *newc; 3894 EVP_AES_OCB_CTX *new_octx; 3895 3896 switch (type) { 3897 case EVP_CTRL_INIT: 3898 octx->key_set = 0; 3899 octx->iv_set = 0; 3900 octx->ivlen = EVP_CIPHER_CTX_iv_length(c); 3901 octx->iv = EVP_CIPHER_CTX_iv_noconst(c); 3902 octx->taglen = 16; 3903 octx->data_buf_len = 0; 3904 octx->aad_buf_len = 0; 3905 return 1; 3906 3907 case EVP_CTRL_AEAD_SET_IVLEN: 3908 /* IV len must be 1 to 15 */ 3909 if (arg <= 0 || arg > 15) 3910 return 0; 3911 3912 octx->ivlen = arg; 3913 return 1; 3914 3915 case EVP_CTRL_AEAD_SET_TAG: 3916 if (!ptr) { 3917 /* Tag len must be 0 to 16 */ 3918 if (arg < 0 || arg > 16) 3919 

#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (!ptr) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
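
/*-
 * Illustrative sketch (not part of the library): how the tag controls
 * handled above are typically exercised through EVP_CIPHER_CTX_ctrl().
 * When decrypting, the expected tag must be supplied before finalisation;
 * when encrypting, the computed tag is fetched afterwards.  The 16-byte
 * "tag" buffer is a hypothetical placeholder.
 *
 *     (decrypt) EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, tag);
 *     (encrypt) EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */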

# ifdef HWAES_CAPABLE
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
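
/*-
 * Illustrative sketch (not part of the library): the init handler above
 * caches an IV that arrives before the key is set, so a caller can install
 * a non-default IV length and then provide key and IV in a later init call.
 * The names "c", "key" and "iv" are hypothetical placeholders.
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 15, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);    (iv is 15 bytes here)
 */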

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                          /* OPENSSL_NO_OCB */
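
/*-
 * Illustrative sketch (not part of the library): the calling sequence that
 * aes_ocb_cipher() above expects when driven through the public EVP
 * interface.  An update with out == NULL feeds AAD; the final call with
 * in == NULL flushes any buffered partial block and computes or verifies
 * the tag.  All buffers and lengths below are hypothetical placeholders
 * and error checking is omitted for brevity.
 *
 *     int outl, tmpl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);    (AAD pass)
 *     EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);        (data pass)
 *     EVP_EncryptFinal_ex(c, ct + outl, &tmpl);          (flush + tag)
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */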