/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#include <sys/modctl.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>
#include <modes/gcm_impl.h>

#define	CRYPTO_PROVIDER_NAME "aes"

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1, { (void *)&modlcrypto, NULL }
};

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

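/*
 * KCF consumers address these mechanisms by name, not by table position.
 * As an illustrative, consumer-side sketch (not part of this provider),
 * a kernel client would typically resolve a mechanism type before issuing
 * requests against this table:
 *
 *	crypto_mech_type_t mech_type = crypto_mech2id(SUN_CKM_AES_GCM);
 *	if (mech_type == CRYPTO_MECH_INVALID)
 *		return (ENOTSUP);
 */
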
static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	.encrypt_init = aes_encrypt_init,
	.encrypt = aes_encrypt,
	.encrypt_update = aes_encrypt_update,
	.encrypt_final = aes_encrypt_final,
	.encrypt_atomic = aes_encrypt_atomic,
	.decrypt_init = aes_decrypt_init,
	.decrypt = aes_decrypt,
	.decrypt_update = aes_decrypt_update,
	.decrypt_final = aes_decrypt_final,
	.decrypt_atomic = aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_mac_ops_t aes_mac_ops = {
	.mac_init = NULL,
	.mac = NULL,
	.mac_update = NULL,
	.mac_final = NULL,
	.mac_atomic = aes_mac_atomic,
	.mac_verify_atomic = aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
	.create_ctx_template = aes_create_ctx_template,
	.free_context = aes_free_context
};

static crypto_ops_t aes_crypto_ops = {{{{{
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops
}}}}};

static crypto_provider_info_t aes_prov_info = {{{{
	CRYPTO_SPI_VERSION_1,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
}}}};

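/*
 * Handle returned by KCF when the provider registers.  It remains nonzero
 * while the provider is registered and is required to unregister again in
 * aes_mod_fini().
 */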
static crypto_kcf_provider_handle_t aes_prov_handle = 0;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
aes_mod_init(void)
{
	int ret;

	/* Determine the fastest available implementation. */
	aes_impl_init();
	gcm_impl_init();

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF.  If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}

int
aes_mod_fini(void)
{
	/* Unregister from KCF if the module is registered. */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (mod_remove(&modlinkage));
}

static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES.
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

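/*
 * A note on the bit arithmetic above: ck_length is expressed in bits, and
 * assuming the usual definitions of AES_MINBITS (128) and AES_MAXBITS (256),
 * the range check plus the multiple-of-64 test together admit exactly the
 * 128-, 192-, and 256-bit key sizes defined for AES.
 */
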
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, the plaintext must be a multiple of the AES
	 * block size.  This test is only valid for ciphers whose block
	 * size is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	ASSERT(ciphertext != NULL);

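	/*
	 * Output sizing for the authenticated modes, worked through for a
	 * hypothetical case: with AES-GCM and a 16-byte (128-bit) tag,
	 * encrypting 512 bytes of plaintext needs 512 + 16 = 528 bytes of
	 * ciphertext, since gcm_encrypt_final() appends the tag.  GMAC is
	 * the degenerate case: no plaintext is permitted, so the output is
	 * the tag alone.
	 */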
	/*
	 * For the following cases we return only the length needed to
	 * store the output and must not destroy the context.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() computes the MAC and appends it to
		 * the existing ciphertext, so the left-over length value
		 * must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() computes the MAC and appends it to
		 * the existing ciphertext, so the left-over length value
		 * must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, the ciphertext must be a multiple of the AES
	 * block size.  This test is only valid for ciphers whose block
	 * size is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	ASSERT(plaintext != NULL);

	/*
	 * Return the length needed to store the output; do not destroy
	 * the context when the plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than the ciphertext
	 * GCM:  plaintext is TAG len smaller than the ciphertext
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	ASSERT(ciphertext != NULL);

	/* compute the number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return the length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

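	/*
	 * Both arms of the switch below feed aes_encrypt_contiguous_blocks();
	 * they differ only in how scattered input buffers are walked (raw
	 * iovec vs. uio).  The block-rounding above keeps any sub-block tail
	 * in ac_remainder_len: e.g. 5 buffered bytes plus 20 new bytes yield
	 * 16 bytes (one AES block) of ciphertext now and leave 9 bytes
	 * buffered for a later update or final call.
	 */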
	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	ASSERT(plaintext != NULL);

	/*
	 * Compute the number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return the length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

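	/*
	 * As in aes_encrypt_final() above, a successful return from this
	 * routine releases the multi-part context via aes_free_context(),
	 * so the operation cannot be resumed afterwards.
	 */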
	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0) {
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		} else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = gcm_ctx->gcm_processed_data_len -
		    gcm_ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final(gcm_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags &
	    (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

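/*
 * KCF software provider atomic entry points.  Each performs a complete
 * operation in a single call: the aes_ctx_t lives on the caller's stack,
 * is zeroed before use, and never becomes KCF session state, so nothing
 * is left behind on either the success or the error path.
 */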
/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	ASSERT(ciphertext != NULL);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that the plaintext
	 * be a multiple of the AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			/*
			 * Go through the cleanup path rather than return
			 * directly, so the key schedule allocated by
			 * aes_common_init_ctx() above is not leaked.
			 */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		/* FALLTHRU */
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}
#ifdef CAN_USE_GCM_ASM
	if ((aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) &&
	    ((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {
		gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;

		bzero(ctx->gcm_Htable, ctx->gcm_htab_len);
		kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
	}
#endif

	return (ret);
}

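/*
 * Cleanup discipline shared by both atomic paths: every buffer that held
 * key material or key-derived state (the expanded key schedule and, when
 * the assembly GCM path is compiled in, the GHASH Htable) is zeroed with
 * bzero() before it is freed, so no key material lingers in freed kernel
 * memory.
 */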
/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	ASSERT(plaintext != NULL);

	/*
	 * CCM, GCM, CTR, and GMAC modes do not require that the ciphertext
	 * be a multiple of the AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			/*
			 * Go through the cleanup path rather than return
			 * directly, so the key schedule allocated by
			 * aes_common_init_ctx() above is not leaked.
			 */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

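	/*
	 * For CCM, GCM, and GMAC the update below only accumulates
	 * ciphertext; no plaintext is released until the corresponding
	 * *_decrypt_final() verifies the MAC/tag.  That is why
	 * length_needed above is derived from the context totals rather
	 * than from what the update itself produces.
	 */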
	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			vmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
#ifdef CAN_USE_GCM_ASM
		if (((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {
			gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;

			bzero(ctx->gcm_Htable, ctx->gcm_htab_len);
			kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
		}
#endif
	}

	return (ret);
}

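/*
 * A context template lets a consumer pay the key-schedule setup cost once
 * and reuse the result across many operations with the same key.  As an
 * illustrative, consumer-side sketch (via the KCF API, not this SPI):
 *
 *	crypto_ctx_template_t tmpl = NULL;
 *	if (crypto_create_ctx_template(&mech, &key, &tmpl, KM_SLEEP) ==
 *	    CRYPTO_SUCCESS) {
 *		... pass tmpl to crypto_encrypt()/crypto_decrypt() calls ...
 *		crypto_destroy_ctx_template(tmpl);
 *	}
 */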
/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size,
	    crypto_kmflag(req))) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize the key schedule.  Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		bzero(keysched, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}

static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}

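/*
 * Common context setup shared by the multi-part init and atomic paths:
 * adopt the caller-supplied template as the key schedule, or expand a
 * fresh one, then hand off to the mode-specific init routine.
 */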
static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size = 0;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize the key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	/*
	 * Invalid mechanism parameters set rv and break (rather than
	 * return directly) so that the cleanup below can release a key
	 * schedule this provider allocated itself.
	 */
	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}

static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}

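/*
 * GMAC is implemented by delegation: the MAC entry points below recast the
 * request as AES-GCM with zero-length plaintext/ciphertext, carrying the
 * message as AAD and using the GCM tag as the MAC.  Assuming the usual ICP
 * definitions, the IV is fixed at 12 bytes (AES_GMAC_IV_LEN) and the tag
 * at 128 bits (AES_GMAC_TAG_BITS).
 */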
static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
	    key, &null_crypto_data, mac, template, req));
}

static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
	    key, mac, &null_crypto_data, template, req));
}