/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/ddi.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/sysmacros.h>
#include <sys/strsun.h>
#include <modes/modes.h>
#include <aes/aes_impl.h>

/* Module-ops vector for crypto modules, supplied by the KCF framework. */
extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};

/*
 * CSPI information (entry points, provider info, etc.)
 */
typedef enum aes_mech_type {
	AES_ECB_MECH_INFO_TYPE,		/* SUN_CKM_AES_ECB */
	AES_CBC_MECH_INFO_TYPE,		/* SUN_CKM_AES_CBC */
	/*
	 * NOTE(review): AES_CBC_PAD_MECH_INFO_TYPE has no entry in
	 * aes_mech_info_tab below -- presumably unimplemented here;
	 * confirm before relying on this mech type.
	 */
	AES_CBC_PAD_MECH_INFO_TYPE,	/* SUN_CKM_AES_CBC_PAD */
	AES_CTR_MECH_INFO_TYPE,		/* SUN_CKM_AES_CTR */
	AES_CCM_MECH_INFO_TYPE,		/* SUN_CKM_AES_CCM */
	AES_GCM_MECH_INFO_TYPE		/* SUN_CKM_AES_GCM */
} aes_mech_type_t;

/*
 * The following definitions are to keep EXPORT_SRC happy.
 */
#ifndef AES_MIN_KEY_BYTES
#define	AES_MIN_KEY_BYTES	0
#endif

#ifndef AES_MAX_KEY_BYTES
#define	AES_MAX_KEY_BYTES	0
#endif

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};

/*
 * operations are in-place if the output buffer is NULL
 *
 * NOTE(review): macro body is an unbraced `if'; it is safe only because
 * every caller uses it as a complete statement (never as the body of an
 * outer un-braced if/else).
 */
#define	AES_ARG_INPLACE(input, output)				\
	if ((output) == NULL)					\
		(output) = (input);

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
aes_decrypt_init, 160 aes_decrypt, 161 aes_decrypt_update, 162 aes_decrypt_final, 163 aes_decrypt_atomic 164 }; 165 166 static int aes_create_ctx_template(crypto_provider_handle_t, 167 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *, 168 size_t *, crypto_req_handle_t); 169 static int aes_free_context(crypto_ctx_t *); 170 171 static crypto_ctx_ops_t aes_ctx_ops = { 172 aes_create_ctx_template, 173 aes_free_context 174 }; 175 176 static crypto_ops_t aes_crypto_ops = { 177 &aes_control_ops, 178 NULL, 179 &aes_cipher_ops, 180 NULL, 181 NULL, 182 NULL, 183 NULL, 184 NULL, 185 NULL, 186 NULL, 187 NULL, 188 NULL, 189 NULL, 190 &aes_ctx_ops 191 }; 192 193 static crypto_provider_info_t aes_prov_info = { 194 CRYPTO_SPI_VERSION_1, 195 "AES Software Provider", 196 CRYPTO_SW_PROVIDER, 197 {&modlinkage}, 198 NULL, 199 &aes_crypto_ops, 200 sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t), 201 aes_mech_info_tab 202 }; 203 204 static crypto_kcf_provider_handle_t aes_prov_handle = NULL; 205 206 int 207 _init(void) 208 { 209 int ret; 210 211 /* 212 * Register with KCF. If the registration fails, return error. 213 */ 214 if ((ret = crypto_register_provider(&aes_prov_info, 215 &aes_prov_handle)) != CRYPTO_SUCCESS) { 216 cmn_err(CE_WARN, "%s _init: crypto_register_provider()" 217 "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret); 218 return (EACCES); 219 } 220 221 if ((ret = mod_install(&modlinkage)) != 0) { 222 int rv; 223 224 ASSERT(aes_prov_handle != NULL); 225 /* We should not return if the unregister returns busy. */ 226 while ((rv = crypto_unregister_provider(aes_prov_handle)) 227 == CRYPTO_BUSY) { 228 cmn_err(CE_WARN, 229 "%s _init: crypto_unregister_provider() " 230 "failed (0x%x). Retrying.", 231 CRYPTO_PROVIDER_NAME, rv); 232 /* wait 10 seconds and try again. */ 233 delay(10 * drv_usectohz(1000000)); 234 } 235 } 236 237 return (ret); 238 } 239 240 int 241 _fini(void) 242 { 243 int ret; 244 245 /* 246 * Unregister from KCF if previous registration succeeded. 
247 */ 248 if (aes_prov_handle != NULL) { 249 if ((ret = crypto_unregister_provider(aes_prov_handle)) != 250 CRYPTO_SUCCESS) { 251 cmn_err(CE_WARN, 252 "%s _fini: crypto_unregister_provider() " 253 "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret); 254 return (EBUSY); 255 } 256 aes_prov_handle = NULL; 257 } 258 259 return (mod_remove(&modlinkage)); 260 } 261 262 int 263 _info(struct modinfo *modinfop) 264 { 265 return (mod_info(&modlinkage, modinfop)); 266 } 267 268 269 static int 270 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag) 271 { 272 void *p = NULL; 273 int rv = CRYPTO_SUCCESS; 274 275 switch (mechanism->cm_type) { 276 case AES_ECB_MECH_INFO_TYPE: 277 /* no parameter */ 278 if (ctx != NULL) 279 p = ecb_alloc_ctx(kmflag); 280 break; 281 case AES_CBC_MECH_INFO_TYPE: 282 if (mechanism->cm_param != NULL && 283 mechanism->cm_param_len != AES_BLOCK_LEN) { 284 rv = CRYPTO_MECHANISM_PARAM_INVALID; 285 break; 286 } 287 if (ctx != NULL) 288 p = cbc_alloc_ctx(kmflag); 289 break; 290 case AES_CTR_MECH_INFO_TYPE: 291 if (mechanism->cm_param != NULL && 292 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) { 293 rv = CRYPTO_MECHANISM_PARAM_INVALID; 294 break; 295 } 296 if (ctx != NULL) 297 p = ctr_alloc_ctx(kmflag); 298 break; 299 case AES_CCM_MECH_INFO_TYPE: 300 if (mechanism->cm_param != NULL && 301 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) { 302 rv = CRYPTO_MECHANISM_PARAM_INVALID; 303 break; 304 } 305 if (ctx != NULL) 306 p = ccm_alloc_ctx(kmflag); 307 break; 308 case AES_GCM_MECH_INFO_TYPE: 309 if (mechanism->cm_param != NULL && 310 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) { 311 rv = CRYPTO_MECHANISM_PARAM_INVALID; 312 break; 313 } 314 if (ctx != NULL) 315 p = gcm_alloc_ctx(kmflag); 316 break; 317 default: 318 rv = CRYPTO_MECHANISM_INVALID; 319 } 320 if (ctx != NULL) 321 *ctx = p; 322 323 return (rv); 324 } 325 326 /* EXPORT DELETE START */ 327 328 /* 329 * Initialize key schedules for AES 330 */ 331 static 
int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Expand `key' into the key schedule `newbie'.
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		/* ck_length is compared against bit constants */
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

/* EXPORT DELETE END */

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* This software provider is always ready. */
	*status = CRYPTO_PROVIDER_READY;
}

/* Thin wrappers: encrypt/decrypt init differ only in the boolean flag. */
static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req) {
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req) {
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}



/*
 * KCF software provider encrypt entry points.
 *
 * Allocates a mode-specific aes_ctx_t, initializes it from the key /
 * template, and stores it in ctx->cc_provider_private.  On failure the
 * allocated context is freed before returning.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{

	/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

/*
 * Copy a 16-byte AES block into two 64-bit words.  Uses direct 64-bit
 * loads when `in' is 8-byte aligned; otherwise falls back to the
 * byte-wise AES_COPY_BLOCK to avoid a misaligned access.
 */
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

/*
 * Single-part encrypt: performs an update plus the mode-specific final
 * processing, then frees the context.
 */
/* ARGSUSED */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 * Even though AES CCM mode is a block cipher, it does not
	 * require the plaintext to be a multiple of AES block size.
	 * The length requirement for AES CCM mode has already been checked
	 * at init time
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE)) == 0) &&
	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/* CCM and GCM both append an ac_mac_len-byte MAC/tag. */
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
	} else if (aes_ctx->ac_flags & GCM_MODE) {
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
	} else {
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		/*
		 * NOTE(review): context is intentionally not freed here
		 * (only aes_encrypt_update failed); presumably the caller
		 * or framework tears it down -- confirm.
		 */
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & GCM_MODE) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	/* EXPORT DELETE END */

	/* LINTED */
	return (ret);
}

/*
 * Single-part decrypt: update plus mode-specific final processing;
 * frees the context on every exit via `cleanup'.
 */
/* ARGSUSED */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 * Even though AES CCM mode is a block cipher, it does not
	 * require the plaintext to be a multiple of AES block size.
	 * The length requirement for AES CCM mode has already been checked
	 * at init time
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE)) == 0) &&
	    (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 *
	 * For AES CCM mode, size of the plaintext will be MAC_SIZE
	 * smaller than size of the cipher text.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		if (plaintext->cd_length < aes_ctx->ac_processed_data_len) {
			plaintext->cd_length = aes_ctx->ac_processed_data_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
		saved_offset = plaintext->cd_offset;
		saved_length = plaintext->cd_length;
	} else if (aes_ctx->ac_flags & GCM_MODE) {
		/*
		 * NOTE(review): the local `ctx' shadows the crypto_ctx_t
		 * parameter within this block (harmless here, but
		 * -Wshadow would flag it).
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ciphertext->cd_length - ctx->gcm_tag_len;

		if (plaintext->cd_length < pt_len) {
			plaintext->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
		saved_offset = plaintext->cd_offset;
		saved_length = plaintext->cd_length;
	} else if (plaintext->cd_length < ciphertext->cd_length) {
		plaintext->cd_length = ciphertext->cd_length;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & GCM_MODE) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	/* EXPORT DELETE END */

	/* LINTED */
	return (ret);
}

/*
 * Multi-part encrypt update.  Processes as many whole AES blocks as are
 * available (remainder is buffered in the context); for CTR mode any
 * trailing partial block is flushed via ctr_mode_final().
 */
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	/* round down to a whole number of AES blocks */
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;


	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		/*
		 * NOTE(review): this overwrites `ret' even when the update
		 * above failed -- confirm that is intended.
		 */
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

/*
 * Multi-part decrypt update.  For CCM/GCM no plaintext is produced here
 * (ciphertext is accumulated until final); for the other modes whole
 * blocks are decrypted and the remainder is buffered.
 */
/* ARGSUSED */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM and GCM since these mechanisms
	 * never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM buffers ciphertext internally; tell it how to allocate */
	if (aes_ctx->ac_flags & GCM_MODE)
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		/* map the generic length error to the decrypt-side code */
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}

/*
 * Multi-part encrypt final.  Flushes mode-specific leftovers (CTR
 * partial block, CCM/GCM MAC) and frees the context.
 */
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{

	/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & GCM_MODE) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

/*
 * Multi-part decrypt final.  For CCM/GCM this is where all plaintext is
 * produced and the MAC/tag verified; the context is then freed.
 */
/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{

	/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			/* CTR: flush the trailing partial block */
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset -
saved_offset; 960 } else { 961 data->cd_length = saved_length; 962 } 963 964 data->cd_offset = saved_offset; 965 if (ret != CRYPTO_SUCCESS) { 966 return (ret); 967 } 968 } else if (aes_ctx->ac_flags & GCM_MODE) { 969 /* 970 * This is where all the plaintext is returned, make sure 971 * the plaintext buffer is big enough 972 */ 973 gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx; 974 size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len; 975 976 if (data->cd_length < pt_len) { 977 data->cd_length = pt_len; 978 return (CRYPTO_BUFFER_TOO_SMALL); 979 } 980 981 saved_offset = data->cd_offset; 982 saved_length = data->cd_length; 983 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data, 984 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block); 985 if (ret == CRYPTO_SUCCESS) { 986 data->cd_length = data->cd_offset - saved_offset; 987 } else { 988 data->cd_length = saved_length; 989 } 990 991 data->cd_offset = saved_offset; 992 if (ret != CRYPTO_SUCCESS) { 993 return (ret); 994 } 995 } 996 997 998 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE)) == 0) { 999 data->cd_length = 0; 1000 } 1001 1002 (void) aes_free_context(ctx); 1003 1004 /* EXPORT DELETE END */ 1005 1006 return (CRYPTO_SUCCESS); 1007 } 1008 1009 /* ARGSUSED */ 1010 static int 1011 aes_encrypt_atomic(crypto_provider_handle_t provider, 1012 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1013 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext, 1014 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1015 { 1016 aes_ctx_t aes_ctx; /* on the stack */ 1017 off_t saved_offset; 1018 size_t saved_length; 1019 int ret; 1020 1021 AES_ARG_INPLACE(plaintext, ciphertext); 1022 1023 if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) && 1024 (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) { 1025 /* 1026 * Plaintext must be a multiple of AES block size. 1027 * This test only works for non-padded mechanisms 1028 * when blocksize is 2^N. 
1029 */ 1030 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0) 1031 return (CRYPTO_DATA_LEN_RANGE); 1032 } 1033 1034 /* return length needed to store the output */ 1035 if (ciphertext->cd_length < plaintext->cd_length) { 1036 ciphertext->cd_length = plaintext->cd_length; 1037 return (CRYPTO_BUFFER_TOO_SMALL); 1038 } 1039 1040 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS) 1041 return (ret); 1042 1043 bzero(&aes_ctx, sizeof (aes_ctx_t)); 1044 1045 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key, 1046 crypto_kmflag(req), B_TRUE); 1047 if (ret != CRYPTO_SUCCESS) 1048 return (ret); 1049 1050 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) { 1051 size_t length_needed 1052 = plaintext->cd_length + aes_ctx.ac_mac_len; 1053 if (ciphertext->cd_length < length_needed) { 1054 ciphertext->cd_length = length_needed; 1055 return (CRYPTO_BUFFER_TOO_SMALL); 1056 } 1057 } 1058 1059 1060 saved_offset = ciphertext->cd_offset; 1061 saved_length = ciphertext->cd_length; 1062 1063 /* 1064 * Do an update on the specified input data. 
1065 */ 1066 switch (plaintext->cd_format) { 1067 case CRYPTO_DATA_RAW: 1068 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext, 1069 aes_encrypt_contiguous_blocks, aes_copy_block64); 1070 break; 1071 case CRYPTO_DATA_UIO: 1072 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext, 1073 aes_encrypt_contiguous_blocks, aes_copy_block64); 1074 break; 1075 case CRYPTO_DATA_MBLK: 1076 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext, 1077 aes_encrypt_contiguous_blocks, aes_copy_block64); 1078 break; 1079 default: 1080 ret = CRYPTO_ARGUMENTS_BAD; 1081 } 1082 1083 if (ret == CRYPTO_SUCCESS) { 1084 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) { 1085 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx, 1086 ciphertext, AES_BLOCK_LEN, aes_encrypt_block, 1087 aes_xor_block); 1088 if (ret != CRYPTO_SUCCESS) 1089 goto out; 1090 ASSERT(aes_ctx.ac_remainder_len == 0); 1091 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE) { 1092 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx, 1093 ciphertext, AES_BLOCK_LEN, aes_encrypt_block, 1094 aes_copy_block, aes_xor_block); 1095 if (ret != CRYPTO_SUCCESS) 1096 goto out; 1097 ASSERT(aes_ctx.ac_remainder_len == 0); 1098 } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) { 1099 if (aes_ctx.ac_remainder_len > 0) { 1100 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx, 1101 ciphertext, aes_encrypt_block); 1102 if (ret != CRYPTO_SUCCESS) 1103 goto out; 1104 } 1105 } else { 1106 ASSERT(aes_ctx.ac_remainder_len == 0); 1107 } 1108 1109 if (plaintext != ciphertext) { 1110 ciphertext->cd_length = 1111 ciphertext->cd_offset - saved_offset; 1112 } 1113 } else { 1114 ciphertext->cd_length = saved_length; 1115 } 1116 ciphertext->cd_offset = saved_offset; 1117 1118 out: 1119 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1120 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1121 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1122 } 1123 1124 return (ret); 1125 } 1126 1127 /* ARGSUSED */ 1128 static int 1129 
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	/*
	 * Single-shot ("atomic") AES decryption entry point for KCF.
	 * Builds a throwaway context on the stack, runs one update pass
	 * over the ciphertext, applies the mode-specific finalization,
	 * and tears everything down before returning.
	 */
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	int ret;

	/* in-place operation: a NULL output buffer aliases the input */
	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
	 * This test is only valid for non-padded mechanisms
	 * when blocksize is 2^N
	 * Even though AES CCM mode is a block cipher, it does not
	 * require the plaintext to be a multiple of AES block size.
	 * The length requirement for AES CCM mode will be checked
	 * at init time
	 */
	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE) &&
	    ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0))
		return (CRYPTO_DATA_LEN_RANGE);

	/*
	 * return length needed to store the output, length requirement
	 * for AES CCM mode can not be determined until later
	 * (CCM output is shorter than the input by the MAC length,
	 * which is only known after parameter parsing at init time).
	 */
	if ((plaintext->cd_length < ciphertext->cd_length) &&
	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
		plaintext->cd_length = ciphertext->cd_length;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}


	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	/* zeroed so the out: cleanup below can key off ac_flags safely */
	bzero(&aes_ctx, sizeof (aes_ctx_t));

	/* B_FALSE: this is a decrypt-side initialization */
	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	/* check length requirement for AES CCM mode now */
	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
		if (plaintext->cd_length < aes_ctx.ac_data_len) {
			plaintext->cd_length = aes_ctx.ac_data_len;
			ret = CRYPTO_BUFFER_TOO_SMALL;
			/* goto (not return): the ctx now owns resources */
			goto out;
		}
	}

	/* remember caller's view so we can report bytes produced */
	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE)
		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			/* CCM buffers the whole message; all of it and the
			 * MAC must have been consumed by the update pass. */
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				/* report bytes actually produced */
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			/* ECB/CBC: input was block-aligned, so no residue */
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			/* CTR: flush any partial final block */
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				/* decrypt side reports the encrypted-data
				 * variant of the length-range error */
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	/* scrub and free the key schedule if we (not a template) own it */
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	/*
	 * CCM/GCM decrypt accumulate data in internal buffers (MAC must be
	 * verified before plaintext is released); free those here.
	 */
	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & GCM_MODE) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
1285 */ 1286 /* ARGSUSED */ 1287 static int 1288 aes_create_ctx_template(crypto_provider_handle_t provider, 1289 crypto_mechanism_t *mechanism, crypto_key_t *key, 1290 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req) 1291 { 1292 1293 /* EXPORT DELETE START */ 1294 1295 void *keysched; 1296 size_t size; 1297 int rv; 1298 1299 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE && 1300 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE && 1301 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE && 1302 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE) 1303 return (CRYPTO_MECHANISM_INVALID); 1304 1305 if ((keysched = aes_alloc_keysched(&size, 1306 crypto_kmflag(req))) == NULL) { 1307 return (CRYPTO_HOST_MEMORY); 1308 } 1309 1310 /* 1311 * Initialize key schedule. Key length information is stored 1312 * in the key. 1313 */ 1314 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) { 1315 bzero(keysched, size); 1316 kmem_free(keysched, size); 1317 return (rv); 1318 } 1319 1320 *tmpl = keysched; 1321 *tmpl_size = size; 1322 1323 /* EXPORT DELETE END */ 1324 1325 return (CRYPTO_SUCCESS); 1326 } 1327 1328 /* ARGSUSED */ 1329 static int 1330 aes_free_context(crypto_ctx_t *ctx) 1331 { 1332 1333 /* EXPORT DELETE START */ 1334 1335 aes_ctx_t *aes_ctx = ctx->cc_provider_private; 1336 1337 if (aes_ctx != NULL) { 1338 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1339 ASSERT(aes_ctx->ac_keysched_len != 0); 1340 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len); 1341 kmem_free(aes_ctx->ac_keysched, 1342 aes_ctx->ac_keysched_len); 1343 } 1344 crypto_free_mode_ctx(aes_ctx); 1345 ctx->cc_provider_private = NULL; 1346 } 1347 1348 /* EXPORT DELETE END */ 1349 1350 return (CRYPTO_SUCCESS); 1351 } 1352 1353 /* ARGSUSED */ 1354 static int 1355 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template, 1356 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag, 1357 boolean_t is_encrypt_init) 1358 { 1359 int rv = CRYPTO_SUCCESS; 1360 1361 
/* EXPORT DELETE START */ 1362 1363 void *keysched; 1364 size_t size; 1365 1366 if (template == NULL) { 1367 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL) 1368 return (CRYPTO_HOST_MEMORY); 1369 /* 1370 * Initialize key schedule. 1371 * Key length is stored in the key. 1372 */ 1373 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) { 1374 kmem_free(keysched, size); 1375 return (rv); 1376 } 1377 1378 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE; 1379 aes_ctx->ac_keysched_len = size; 1380 } else { 1381 keysched = template; 1382 } 1383 aes_ctx->ac_keysched = keysched; 1384 1385 switch (mechanism->cm_type) { 1386 case AES_CBC_MECH_INFO_TYPE: 1387 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param, 1388 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64); 1389 break; 1390 case AES_CTR_MECH_INFO_TYPE: { 1391 CK_AES_CTR_PARAMS *pp; 1392 1393 if (mechanism->cm_param == NULL || 1394 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) { 1395 return (CRYPTO_MECHANISM_PARAM_INVALID); 1396 } 1397 pp = (CK_AES_CTR_PARAMS *)mechanism->cm_param; 1398 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits, 1399 pp->cb, aes_copy_block); 1400 break; 1401 } 1402 case AES_CCM_MECH_INFO_TYPE: 1403 if (mechanism->cm_param == NULL || 1404 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) { 1405 return (CRYPTO_MECHANISM_PARAM_INVALID); 1406 } 1407 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param, 1408 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block, 1409 aes_xor_block); 1410 break; 1411 case AES_GCM_MECH_INFO_TYPE: 1412 if (mechanism->cm_param == NULL || 1413 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) { 1414 return (CRYPTO_MECHANISM_PARAM_INVALID); 1415 } 1416 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param, 1417 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 1418 aes_xor_block); 1419 break; 1420 case AES_ECB_MECH_INFO_TYPE: 1421 aes_ctx->ac_flags |= ECB_MODE; 1422 } 1423 1424 if (rv != 
CRYPTO_SUCCESS) { 1425 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1426 bzero(keysched, size); 1427 kmem_free(keysched, size); 1428 } 1429 } 1430 1431 /* EXPORT DELETE END */ 1432 1433 return (rv); 1434 } 1435