/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>
#include <modes/gcm_impl.h>

/*
 * Mechanism info structure passed to KCF during registration.
 */
static const crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
};
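/*
 * Each entry above pairs a PKCS#11-style mechanism name (SUN_CKM_*)
 * with the operation groups KCF may dispatch to this provider.  Note
 * that only AES_GMAC also advertises CRYPTO_FG_MAC/CRYPTO_FG_MAC_ATOMIC,
 * which is why the aes_mac_ops vector defined below fills in just the
 * two atomic MAC entry points and leaves the rest NULL.
 */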

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *);
static int aes_encrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *);
static int aes_decrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_cipher_ops_t aes_cipher_ops = {
	.encrypt_init = aes_encrypt_init,
	.encrypt = aes_encrypt,
	.encrypt_update = aes_encrypt_update,
	.encrypt_final = aes_encrypt_final,
	.encrypt_atomic = aes_encrypt_atomic,
	.decrypt_init = aes_decrypt_init,
	.decrypt = aes_decrypt,
	.decrypt_update = aes_decrypt_update,
	.decrypt_final = aes_decrypt_final,
	.decrypt_atomic = aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static int aes_mac_verify_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_mac_ops_t aes_mac_ops = {
	.mac_init = NULL,
	.mac = NULL,
	.mac_update = NULL,
	.mac_final = NULL,
	.mac_atomic = aes_mac_atomic,
	.mac_verify_atomic = aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int aes_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t aes_ctx_ops = {
	.create_ctx_template = aes_create_ctx_template,
	.free_context = aes_free_context
};

static const crypto_ops_t aes_crypto_ops = {
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	&aes_ctx_ops,
};

static const crypto_provider_info_t aes_prov_info = {
	"AES Software Provider",
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = 0;
/* Zero-length RAW buffer; serves as the empty payload for the GMAC paths. */
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
aes_mod_init(void)
{
	/* Determine the fastest available implementation. */
	aes_impl_init();
	gcm_impl_init();

	/* Register with KCF.  If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle))
		return (EACCES);

	return (0);
}

int
aes_mod_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (0);
}

static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(KM_SLEEP);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	if (key->ck_length < AES_MINBITS ||
	    key->ck_length > AES_MAXBITS) {
		return (CRYPTO_KEY_SIZE_RANGE);
	}

	/* key length (in bits) must be a multiple of 64: 128, 192, or 256 */
	if ((key->ck_length & 63) != 0)
		return (CRYPTO_KEY_SIZE_RANGE);

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}
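/*
 * Caller-side sketch (illustrative only, not part of the provider):
 * key material arrives in a crypto_key_t whose ck_length is expressed
 * in bits, so a 256-bit key would be set up roughly as
 *
 *	uint8_t keybuf[32];
 *	crypto_key_t key;
 *
 *	key.ck_data = keybuf;
 *	key.ck_length = 256;
 *
 * init_keysched() above accepts only 128-, 192-, or 256-bit lengths.
 */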

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template)
{
	return (aes_common_init(ctx, mechanism, key, template, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template)
{
	return (aes_common_init(ctx, mechanism, key, template, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;

	if ((rv = aes_check_mech_param(mechanism, &aes_ctx))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, KM_SLEEP,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}
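/*
 * Single-part (one-shot) cipher entry points.  aes_encrypt() and
 * aes_decrypt() below first negotiate the output size: if the caller's
 * buffer is smaller than length_needed, cd_length is set to the
 * required size and CRYPTO_BUFFER_TOO_SMALL is returned with the
 * context left intact, so the operation can be retried with a larger
 * buffer.
 */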

static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	ASSERT(ciphertext != NULL);

	/*
	 * Return the length needed to store the output.
	 * Do not destroy the context when the output buffer is too small.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append it
		 * to the existing ciphertext, so the offset and remaining
		 * length must be adjusted to the space left in the buffer.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append it
		 * to the existing ciphertext, so the offset and remaining
		 * length must be adjusted to the space left in the buffer.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	ASSERT(plaintext != NULL);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}
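/*
 * Multi-part cipher entry points.  A session follows the usual KCF
 * shape: *_init() builds the aes_ctx_t, *_update() may be called any
 * number of times and only emits whole AES blocks (a partial tail is
 * carried in ac_remainder_len), and *_final() flushes the remainder
 * or the CCM/GCM tag and then releases the context.
 */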

static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	ASSERT(ciphertext != NULL);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	ASSERT(plaintext != NULL);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
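/*
 * Atomic (single-call) cipher entry points.  Unlike the
 * init/update/final path, these run the whole operation against a
 * stack-allocated aes_ctx_t and then explicitly zero and free the key
 * schedule (and any CCM/GCM bookkeeping) before returning.
 */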

static int
aes_encrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx;
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	memset(&aes_ctx, 0, sizeof (aes_ctx_t));

	ASSERT(ciphertext != NULL);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);
		zfs_fallthrough;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}
	if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		gcm_clear_ctx((gcm_ctx_t *)&aes_ctx);
	}
	return (ret);
}

static int
aes_decrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx;
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	memset(&aes_ctx, 0, sizeof (aes_ctx_t));

	ASSERT(plaintext != NULL);

	/*
	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		gcm_clear_ctx((gcm_ctx_t *)&aes_ctx);
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
static int
aes_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size, KM_SLEEP)) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule.  Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		memset(keysched, 0, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}

static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			memset(aes_ctx->ac_keysched, 0,
			    aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}

static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size = 0;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			memset(keysched, 0, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}
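/*
 * GMAC support is implemented by rewriting the request as GCM over an
 * empty payload: process_gmac_mech() maps the caller's
 * CK_AES_GMAC_PARAMS onto a CK_AES_GCM_PARAMS with the fixed
 * AES_GMAC_IV_LEN/AES_GMAC_TAG_BITS values and presents the data to be
 * MAC'd as AAD, after which the regular aes_encrypt_atomic() and
 * aes_decrypt_atomic() paths (with null_crypto_data as the empty
 * payload) do the actual work.
 */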

static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}

static int
aes_mac_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(&gcm_mech,
	    key, &null_crypto_data, mac, template));
}

static int
aes_mac_verify_atomic(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_data_t *data, crypto_data_t *mac, crypto_spi_ctx_template_t template)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(&gcm_mech,
	    key, mac, &null_crypto_data, template));
}