1 /* 2 * CDDL HEADER START 3 * 4 * The contents of this file are subject to the terms of the 5 * Common Development and Distribution License (the "License"). 6 * You may not use this file except in compliance with the License. 7 * 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE 9 * or http://www.opensolaris.org/os/licensing. 10 * See the License for the specific language governing permissions 11 * and limitations under the License. 12 * 13 * When distributing Covered Code, include this CDDL HEADER in each 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE. 15 * If applicable, add the following below this CDDL HEADER, with the 16 * fields enclosed by brackets "[]" replaced with your own identifying 17 * information: Portions Copyright [yyyy] [name of copyright owner] 18 * 19 * CDDL HEADER END 20 */ 21 /* 22 * Copyright 2009 Sun Microsystems, Inc. All rights reserved. 23 * Use is subject to license terms. 24 */ 25 26 /* 27 * AES provider for the Kernel Cryptographic Framework (KCF) 28 */ 29 30 #include <sys/types.h> 31 #include <sys/systm.h> 32 #include <sys/modctl.h> 33 #include <sys/cmn_err.h> 34 #include <sys/ddi.h> 35 #include <sys/crypto/common.h> 36 #include <sys/crypto/impl.h> 37 #include <sys/crypto/spi.h> 38 #include <sys/sysmacros.h> 39 #include <sys/strsun.h> 40 #include <modes/modes.h> 41 #include <aes/aes_impl.h> 42 43 extern struct mod_ops mod_cryptoops; 44 45 /* 46 * Module linkage information for the kernel. 47 */ 48 static struct modlcrypto modlcrypto = { 49 &mod_cryptoops, 50 "AES Kernel SW Provider" 51 }; 52 53 static struct modlinkage modlinkage = { 54 MODREV_1, 55 (void *)&modlcrypto, 56 NULL 57 }; 58 59 /* 60 * CSPI information (entry points, provider info, etc.) 
61 */ 62 typedef enum aes_mech_type { 63 AES_ECB_MECH_INFO_TYPE, /* SUN_CKM_AES_ECB */ 64 AES_CBC_MECH_INFO_TYPE, /* SUN_CKM_AES_CBC */ 65 AES_CBC_PAD_MECH_INFO_TYPE, /* SUN_CKM_AES_CBC_PAD */ 66 AES_CTR_MECH_INFO_TYPE, /* SUN_CKM_AES_CTR */ 67 AES_CCM_MECH_INFO_TYPE, /* SUN_CKM_AES_CCM */ 68 AES_GCM_MECH_INFO_TYPE, /* SUN_CKM_AES_GCM */ 69 AES_GMAC_MECH_INFO_TYPE /* SUN_CKM_AES_GMAC */ 70 } aes_mech_type_t; 71 72 /* 73 * The following definitions are to keep EXPORT_SRC happy. 74 */ 75 #ifndef AES_MIN_KEY_BYTES 76 #define AES_MIN_KEY_BYTES 0 77 #endif 78 79 #ifndef AES_MAX_KEY_BYTES 80 #define AES_MAX_KEY_BYTES 0 81 #endif 82 83 /* 84 * Mechanism info structure passed to KCF during registration. 85 */ 86 static crypto_mech_info_t aes_mech_info_tab[] = { 87 /* AES_ECB */ 88 {SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE, 89 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 90 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 91 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 92 /* AES_CBC */ 93 {SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE, 94 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 95 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 96 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 97 /* AES_CTR */ 98 {SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE, 99 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 100 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 101 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 102 /* AES_CCM */ 103 {SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE, 104 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 105 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 106 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 107 /* AES_GCM */ 108 {SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE, 109 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 110 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 111 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 112 /* AES_GMAC */ 113 {SUN_CKM_AES_GMAC, 
AES_GMAC_MECH_INFO_TYPE, 114 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 115 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC | 116 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC | 117 CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC | 118 CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC, 119 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES} 120 }; 121 122 /* operations are in-place if the output buffer is NULL */ 123 #define AES_ARG_INPLACE(input, output) \ 124 if ((output) == NULL) \ 125 (output) = (input); 126 127 static void aes_provider_status(crypto_provider_handle_t, uint_t *); 128 129 static crypto_control_ops_t aes_control_ops = { 130 aes_provider_status 131 }; 132 133 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *, 134 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t); 135 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *, 136 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t); 137 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *, 138 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t); 139 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *, 140 crypto_mechanism_t *, crypto_key_t *, int, boolean_t); 141 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *, 142 crypto_req_handle_t); 143 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *, 144 crypto_req_handle_t); 145 146 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *, 147 crypto_req_handle_t); 148 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *, 149 crypto_data_t *, crypto_req_handle_t); 150 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t, 151 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, 152 crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t); 153 154 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *, 155 crypto_req_handle_t); 156 static int 
aes_decrypt_update(crypto_ctx_t *, crypto_data_t *, 157 crypto_data_t *, crypto_req_handle_t); 158 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t, 159 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, 160 crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t); 161 162 static crypto_cipher_ops_t aes_cipher_ops = { 163 aes_encrypt_init, 164 aes_encrypt, 165 aes_encrypt_update, 166 aes_encrypt_final, 167 aes_encrypt_atomic, 168 aes_decrypt_init, 169 aes_decrypt, 170 aes_decrypt_update, 171 aes_decrypt_final, 172 aes_decrypt_atomic 173 }; 174 175 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t, 176 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *, 177 crypto_spi_ctx_template_t, crypto_req_handle_t); 178 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t, 179 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *, 180 crypto_spi_ctx_template_t, crypto_req_handle_t); 181 182 static crypto_mac_ops_t aes_mac_ops = { 183 NULL, 184 NULL, 185 NULL, 186 NULL, 187 aes_mac_atomic, 188 aes_mac_verify_atomic 189 }; 190 191 static int aes_create_ctx_template(crypto_provider_handle_t, 192 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *, 193 size_t *, crypto_req_handle_t); 194 static int aes_free_context(crypto_ctx_t *); 195 196 static crypto_ctx_ops_t aes_ctx_ops = { 197 aes_create_ctx_template, 198 aes_free_context 199 }; 200 201 static crypto_ops_t aes_crypto_ops = { 202 &aes_control_ops, 203 NULL, 204 &aes_cipher_ops, 205 &aes_mac_ops, 206 NULL, 207 NULL, 208 NULL, 209 NULL, 210 NULL, 211 NULL, 212 NULL, 213 NULL, 214 NULL, 215 &aes_ctx_ops 216 }; 217 218 static crypto_provider_info_t aes_prov_info = { 219 CRYPTO_SPI_VERSION_1, 220 "AES Software Provider", 221 CRYPTO_SW_PROVIDER, 222 {&modlinkage}, 223 NULL, 224 &aes_crypto_ops, 225 sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t), 226 aes_mech_info_tab 227 }; 228 229 static 
crypto_kcf_provider_handle_t aes_prov_handle = NULL; 230 static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW }; 231 232 int 233 _init(void) 234 { 235 int ret; 236 237 /* 238 * Register with KCF. If the registration fails, return error. 239 */ 240 if ((ret = crypto_register_provider(&aes_prov_info, 241 &aes_prov_handle)) != CRYPTO_SUCCESS) { 242 cmn_err(CE_WARN, "%s _init: crypto_register_provider()" 243 "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret); 244 return (EACCES); 245 } 246 247 if ((ret = mod_install(&modlinkage)) != 0) { 248 int rv; 249 250 ASSERT(aes_prov_handle != NULL); 251 /* We should not return if the unregister returns busy. */ 252 while ((rv = crypto_unregister_provider(aes_prov_handle)) 253 == CRYPTO_BUSY) { 254 cmn_err(CE_WARN, 255 "%s _init: crypto_unregister_provider() " 256 "failed (0x%x). Retrying.", 257 CRYPTO_PROVIDER_NAME, rv); 258 /* wait 10 seconds and try again. */ 259 delay(10 * drv_usectohz(1000000)); 260 } 261 } 262 263 return (ret); 264 } 265 266 int 267 _fini(void) 268 { 269 int ret; 270 271 /* 272 * Unregister from KCF if previous registration succeeded. 
273 */ 274 if (aes_prov_handle != NULL) { 275 if ((ret = crypto_unregister_provider(aes_prov_handle)) != 276 CRYPTO_SUCCESS) { 277 cmn_err(CE_WARN, 278 "%s _fini: crypto_unregister_provider() " 279 "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret); 280 return (EBUSY); 281 } 282 aes_prov_handle = NULL; 283 } 284 285 return (mod_remove(&modlinkage)); 286 } 287 288 int 289 _info(struct modinfo *modinfop) 290 { 291 return (mod_info(&modlinkage, modinfop)); 292 } 293 294 295 static int 296 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag) 297 { 298 void *p = NULL; 299 boolean_t param_required = B_TRUE; 300 size_t param_len; 301 void *(*alloc_fun)(int); 302 int rv = CRYPTO_SUCCESS; 303 304 switch (mechanism->cm_type) { 305 case AES_ECB_MECH_INFO_TYPE: 306 param_required = B_FALSE; 307 alloc_fun = ecb_alloc_ctx; 308 break; 309 case AES_CBC_MECH_INFO_TYPE: 310 param_len = AES_BLOCK_LEN; 311 alloc_fun = cbc_alloc_ctx; 312 break; 313 case AES_CTR_MECH_INFO_TYPE: 314 param_len = sizeof (CK_AES_CTR_PARAMS); 315 alloc_fun = ctr_alloc_ctx; 316 break; 317 case AES_CCM_MECH_INFO_TYPE: 318 param_len = sizeof (CK_AES_CCM_PARAMS); 319 alloc_fun = ccm_alloc_ctx; 320 break; 321 case AES_GCM_MECH_INFO_TYPE: 322 param_len = sizeof (CK_AES_GCM_PARAMS); 323 alloc_fun = gcm_alloc_ctx; 324 break; 325 case AES_GMAC_MECH_INFO_TYPE: 326 param_len = sizeof (CK_AES_GMAC_PARAMS); 327 alloc_fun = gmac_alloc_ctx; 328 break; 329 default: 330 rv = CRYPTO_MECHANISM_INVALID; 331 } 332 if (param_required && mechanism->cm_param != NULL && 333 mechanism->cm_param_len != param_len) { 334 rv = CRYPTO_MECHANISM_PARAM_INVALID; 335 } 336 if (ctx != NULL) { 337 p = (alloc_fun)(kmflag); 338 *ctx = p; 339 } 340 return (rv); 341 } 342 343 /* EXPORT DELETE START */ 344 345 /* 346 * Initialize key schedules for AES 347 */ 348 static int 349 init_keysched(crypto_key_t *key, void *newbie) 350 { 351 /* 352 * Only keys by value are supported by this module. 
353 */ 354 switch (key->ck_format) { 355 case CRYPTO_KEY_RAW: 356 if (key->ck_length < AES_MINBITS || 357 key->ck_length > AES_MAXBITS) { 358 return (CRYPTO_KEY_SIZE_RANGE); 359 } 360 361 /* key length must be either 128, 192, or 256 */ 362 if ((key->ck_length & 63) != 0) 363 return (CRYPTO_KEY_SIZE_RANGE); 364 break; 365 default: 366 return (CRYPTO_KEY_TYPE_INCONSISTENT); 367 } 368 369 aes_init_keysched(key->ck_data, key->ck_length, newbie); 370 return (CRYPTO_SUCCESS); 371 } 372 373 /* EXPORT DELETE END */ 374 375 /* 376 * KCF software provider control entry points. 377 */ 378 /* ARGSUSED */ 379 static void 380 aes_provider_status(crypto_provider_handle_t provider, uint_t *status) 381 { 382 *status = CRYPTO_PROVIDER_READY; 383 } 384 385 static int 386 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism, 387 crypto_key_t *key, crypto_spi_ctx_template_t template, 388 crypto_req_handle_t req) { 389 return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE)); 390 } 391 392 static int 393 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism, 394 crypto_key_t *key, crypto_spi_ctx_template_t template, 395 crypto_req_handle_t req) { 396 return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE)); 397 } 398 399 400 401 /* 402 * KCF software provider encrypt entry points. 403 */ 404 static int 405 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism, 406 crypto_key_t *key, crypto_spi_ctx_template_t template, 407 crypto_req_handle_t req, boolean_t is_encrypt_init) 408 { 409 410 /* EXPORT DELETE START */ 411 412 aes_ctx_t *aes_ctx; 413 int rv; 414 int kmflag; 415 416 /* 417 * Only keys by value are supported by this module. 
418 */ 419 if (key->ck_format != CRYPTO_KEY_RAW) { 420 return (CRYPTO_KEY_TYPE_INCONSISTENT); 421 } 422 423 kmflag = crypto_kmflag(req); 424 if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag)) 425 != CRYPTO_SUCCESS) 426 return (rv); 427 428 rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag, 429 is_encrypt_init); 430 if (rv != CRYPTO_SUCCESS) { 431 crypto_free_mode_ctx(aes_ctx); 432 return (rv); 433 } 434 435 ctx->cc_provider_private = aes_ctx; 436 437 /* EXPORT DELETE END */ 438 439 return (CRYPTO_SUCCESS); 440 } 441 442 static void 443 aes_copy_block64(uint8_t *in, uint64_t *out) 444 { 445 if (IS_P2ALIGNED(in, sizeof (uint64_t))) { 446 /* LINTED: pointer alignment */ 447 out[0] = *(uint64_t *)&in[0]; 448 /* LINTED: pointer alignment */ 449 out[1] = *(uint64_t *)&in[8]; 450 } else { 451 uint8_t *iv8 = (uint8_t *)&out[0]; 452 453 AES_COPY_BLOCK(in, iv8); 454 } 455 } 456 457 /* ARGSUSED */ 458 static int 459 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext, 460 crypto_data_t *ciphertext, crypto_req_handle_t req) 461 { 462 int ret = CRYPTO_FAILED; 463 464 /* EXPORT DELETE START */ 465 466 aes_ctx_t *aes_ctx; 467 size_t saved_length, saved_offset, length_needed; 468 469 ASSERT(ctx->cc_provider_private != NULL); 470 aes_ctx = ctx->cc_provider_private; 471 472 /* 473 * For block ciphers, plaintext must be a multiple of AES block size. 474 * This test is only valid for ciphers whose blocksize is a power of 2. 475 */ 476 if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) 477 == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0) 478 return (CRYPTO_DATA_LEN_RANGE); 479 480 AES_ARG_INPLACE(plaintext, ciphertext); 481 482 /* 483 * We need to just return the length needed to store the output. 484 * We should not destroy the context for the following case. 
485 */ 486 switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) { 487 case CCM_MODE: 488 length_needed = plaintext->cd_length + aes_ctx->ac_mac_len; 489 break; 490 case GCM_MODE: 491 length_needed = plaintext->cd_length + aes_ctx->ac_tag_len; 492 break; 493 case GMAC_MODE: 494 if (plaintext->cd_length != 0) 495 return (CRYPTO_ARGUMENTS_BAD); 496 497 length_needed = aes_ctx->ac_tag_len; 498 break; 499 default: 500 length_needed = plaintext->cd_length; 501 } 502 503 if (ciphertext->cd_length < length_needed) { 504 ciphertext->cd_length = length_needed; 505 return (CRYPTO_BUFFER_TOO_SMALL); 506 } 507 508 saved_length = ciphertext->cd_length; 509 saved_offset = ciphertext->cd_offset; 510 511 /* 512 * Do an update on the specified input data. 513 */ 514 ret = aes_encrypt_update(ctx, plaintext, ciphertext, req); 515 if (ret != CRYPTO_SUCCESS) { 516 return (ret); 517 } 518 519 /* 520 * For CCM mode, aes_ccm_encrypt_final() will take care of any 521 * left-over unprocessed data, and compute the MAC 522 */ 523 if (aes_ctx->ac_flags & CCM_MODE) { 524 /* 525 * ccm_encrypt_final() will compute the MAC and append 526 * it to existing ciphertext. So, need to adjust the left over 527 * length value accordingly 528 */ 529 530 /* order of following 2 lines MUST not be reversed */ 531 ciphertext->cd_offset = ciphertext->cd_length; 532 ciphertext->cd_length = saved_length - ciphertext->cd_length; 533 ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext, 534 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block); 535 if (ret != CRYPTO_SUCCESS) { 536 return (ret); 537 } 538 539 if (plaintext != ciphertext) { 540 ciphertext->cd_length = 541 ciphertext->cd_offset - saved_offset; 542 } 543 ciphertext->cd_offset = saved_offset; 544 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) { 545 /* 546 * gcm_encrypt_final() will compute the MAC and append 547 * it to existing ciphertext. 
So, need to adjust the left over 548 * length value accordingly 549 */ 550 551 /* order of following 2 lines MUST not be reversed */ 552 ciphertext->cd_offset = ciphertext->cd_length; 553 ciphertext->cd_length = saved_length - ciphertext->cd_length; 554 ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext, 555 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 556 aes_xor_block); 557 if (ret != CRYPTO_SUCCESS) { 558 return (ret); 559 } 560 561 if (plaintext != ciphertext) { 562 ciphertext->cd_length = 563 ciphertext->cd_offset - saved_offset; 564 } 565 ciphertext->cd_offset = saved_offset; 566 } 567 568 ASSERT(aes_ctx->ac_remainder_len == 0); 569 (void) aes_free_context(ctx); 570 571 /* EXPORT DELETE END */ 572 573 /* LINTED */ 574 return (ret); 575 } 576 577 /* ARGSUSED */ 578 static int 579 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext, 580 crypto_data_t *plaintext, crypto_req_handle_t req) 581 { 582 int ret = CRYPTO_FAILED; 583 584 /* EXPORT DELETE START */ 585 586 aes_ctx_t *aes_ctx; 587 off_t saved_offset; 588 size_t saved_length, length_needed; 589 590 ASSERT(ctx->cc_provider_private != NULL); 591 aes_ctx = ctx->cc_provider_private; 592 593 /* 594 * For block ciphers, plaintext must be a multiple of AES block size. 595 * This test is only valid for ciphers whose blocksize is a power of 2. 596 */ 597 if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) 598 == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) { 599 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE); 600 } 601 602 AES_ARG_INPLACE(ciphertext, plaintext); 603 604 /* 605 * Return length needed to store the output. 606 * Do not destroy context when plaintext buffer is too small. 
607 * 608 * CCM: plaintext is MAC len smaller than cipher text 609 * GCM: plaintext is TAG len smaller than cipher text 610 * GMAC: plaintext length must be zero 611 */ 612 switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) { 613 case CCM_MODE: 614 length_needed = aes_ctx->ac_processed_data_len; 615 break; 616 case GCM_MODE: 617 length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len; 618 break; 619 case GMAC_MODE: 620 if (plaintext->cd_length != 0) 621 return (CRYPTO_ARGUMENTS_BAD); 622 623 length_needed = 0; 624 break; 625 default: 626 length_needed = ciphertext->cd_length; 627 } 628 629 if (plaintext->cd_length < length_needed) { 630 plaintext->cd_length = length_needed; 631 return (CRYPTO_BUFFER_TOO_SMALL); 632 } 633 634 saved_offset = plaintext->cd_offset; 635 saved_length = plaintext->cd_length; 636 637 /* 638 * Do an update on the specified input data. 639 */ 640 ret = aes_decrypt_update(ctx, ciphertext, plaintext, req); 641 if (ret != CRYPTO_SUCCESS) { 642 goto cleanup; 643 } 644 645 if (aes_ctx->ac_flags & CCM_MODE) { 646 ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len); 647 ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len); 648 649 /* order of following 2 lines MUST not be reversed */ 650 plaintext->cd_offset = plaintext->cd_length; 651 plaintext->cd_length = saved_length - plaintext->cd_length; 652 653 ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext, 654 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 655 aes_xor_block); 656 if (ret == CRYPTO_SUCCESS) { 657 if (plaintext != ciphertext) { 658 plaintext->cd_length = 659 plaintext->cd_offset - saved_offset; 660 } 661 } else { 662 plaintext->cd_length = saved_length; 663 } 664 665 plaintext->cd_offset = saved_offset; 666 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) { 667 /* order of following 2 lines MUST not be reversed */ 668 plaintext->cd_offset = plaintext->cd_length; 669 plaintext->cd_length = saved_length - plaintext->cd_length; 670 671 ret = 
gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext, 672 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block); 673 if (ret == CRYPTO_SUCCESS) { 674 if (plaintext != ciphertext) { 675 plaintext->cd_length = 676 plaintext->cd_offset - saved_offset; 677 } 678 } else { 679 plaintext->cd_length = saved_length; 680 } 681 682 plaintext->cd_offset = saved_offset; 683 } 684 685 ASSERT(aes_ctx->ac_remainder_len == 0); 686 687 cleanup: 688 (void) aes_free_context(ctx); 689 690 /* EXPORT DELETE END */ 691 692 /* LINTED */ 693 return (ret); 694 } 695 696 /* ARGSUSED */ 697 static int 698 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext, 699 crypto_data_t *ciphertext, crypto_req_handle_t req) 700 { 701 off_t saved_offset; 702 size_t saved_length, out_len; 703 int ret = CRYPTO_SUCCESS; 704 aes_ctx_t *aes_ctx; 705 706 ASSERT(ctx->cc_provider_private != NULL); 707 aes_ctx = ctx->cc_provider_private; 708 709 AES_ARG_INPLACE(plaintext, ciphertext); 710 711 /* compute number of bytes that will hold the ciphertext */ 712 out_len = aes_ctx->ac_remainder_len; 713 out_len += plaintext->cd_length; 714 out_len &= ~(AES_BLOCK_LEN - 1); 715 716 /* return length needed to store the output */ 717 if (ciphertext->cd_length < out_len) { 718 ciphertext->cd_length = out_len; 719 return (CRYPTO_BUFFER_TOO_SMALL); 720 } 721 722 saved_offset = ciphertext->cd_offset; 723 saved_length = ciphertext->cd_length; 724 725 /* 726 * Do the AES update on the specified input data. 
727 */ 728 switch (plaintext->cd_format) { 729 case CRYPTO_DATA_RAW: 730 ret = crypto_update_iov(ctx->cc_provider_private, 731 plaintext, ciphertext, aes_encrypt_contiguous_blocks, 732 aes_copy_block64); 733 break; 734 case CRYPTO_DATA_UIO: 735 ret = crypto_update_uio(ctx->cc_provider_private, 736 plaintext, ciphertext, aes_encrypt_contiguous_blocks, 737 aes_copy_block64); 738 break; 739 case CRYPTO_DATA_MBLK: 740 ret = crypto_update_mp(ctx->cc_provider_private, 741 plaintext, ciphertext, aes_encrypt_contiguous_blocks, 742 aes_copy_block64); 743 break; 744 default: 745 ret = CRYPTO_ARGUMENTS_BAD; 746 } 747 748 /* 749 * Since AES counter mode is a stream cipher, we call 750 * ctr_mode_final() to pick up any remaining bytes. 751 * It is an internal function that does not destroy 752 * the context like *normal* final routines. 753 */ 754 if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) { 755 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, 756 ciphertext, aes_encrypt_block); 757 } 758 759 if (ret == CRYPTO_SUCCESS) { 760 if (plaintext != ciphertext) 761 ciphertext->cd_length = 762 ciphertext->cd_offset - saved_offset; 763 } else { 764 ciphertext->cd_length = saved_length; 765 } 766 ciphertext->cd_offset = saved_offset; 767 768 return (ret); 769 } 770 771 /* ARGSUSED */ 772 static int 773 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext, 774 crypto_data_t *plaintext, crypto_req_handle_t req) 775 { 776 off_t saved_offset; 777 size_t saved_length, out_len; 778 int ret = CRYPTO_SUCCESS; 779 aes_ctx_t *aes_ctx; 780 781 ASSERT(ctx->cc_provider_private != NULL); 782 aes_ctx = ctx->cc_provider_private; 783 784 AES_ARG_INPLACE(ciphertext, plaintext); 785 786 /* 787 * Compute number of bytes that will hold the plaintext. 788 * This is not necessary for CCM, GCM, and GMAC since these 789 * mechanisms never return plaintext for update operations. 
790 */ 791 if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) { 792 out_len = aes_ctx->ac_remainder_len; 793 out_len += ciphertext->cd_length; 794 out_len &= ~(AES_BLOCK_LEN - 1); 795 796 /* return length needed to store the output */ 797 if (plaintext->cd_length < out_len) { 798 plaintext->cd_length = out_len; 799 return (CRYPTO_BUFFER_TOO_SMALL); 800 } 801 } 802 803 saved_offset = plaintext->cd_offset; 804 saved_length = plaintext->cd_length; 805 806 if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) 807 gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req)); 808 809 /* 810 * Do the AES update on the specified input data. 811 */ 812 switch (ciphertext->cd_format) { 813 case CRYPTO_DATA_RAW: 814 ret = crypto_update_iov(ctx->cc_provider_private, 815 ciphertext, plaintext, aes_decrypt_contiguous_blocks, 816 aes_copy_block64); 817 break; 818 case CRYPTO_DATA_UIO: 819 ret = crypto_update_uio(ctx->cc_provider_private, 820 ciphertext, plaintext, aes_decrypt_contiguous_blocks, 821 aes_copy_block64); 822 break; 823 case CRYPTO_DATA_MBLK: 824 ret = crypto_update_mp(ctx->cc_provider_private, 825 ciphertext, plaintext, aes_decrypt_contiguous_blocks, 826 aes_copy_block64); 827 break; 828 default: 829 ret = CRYPTO_ARGUMENTS_BAD; 830 } 831 832 /* 833 * Since AES counter mode is a stream cipher, we call 834 * ctr_mode_final() to pick up any remaining bytes. 835 * It is an internal function that does not destroy 836 * the context like *normal* final routines. 
837 */ 838 if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) { 839 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext, 840 aes_encrypt_block); 841 if (ret == CRYPTO_DATA_LEN_RANGE) 842 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE; 843 } 844 845 if (ret == CRYPTO_SUCCESS) { 846 if (ciphertext != plaintext) 847 plaintext->cd_length = 848 plaintext->cd_offset - saved_offset; 849 } else { 850 plaintext->cd_length = saved_length; 851 } 852 plaintext->cd_offset = saved_offset; 853 854 855 return (ret); 856 } 857 858 /* ARGSUSED */ 859 static int 860 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data, 861 crypto_req_handle_t req) 862 { 863 864 /* EXPORT DELETE START */ 865 866 aes_ctx_t *aes_ctx; 867 int ret; 868 869 ASSERT(ctx->cc_provider_private != NULL); 870 aes_ctx = ctx->cc_provider_private; 871 872 if (data->cd_format != CRYPTO_DATA_RAW && 873 data->cd_format != CRYPTO_DATA_UIO && 874 data->cd_format != CRYPTO_DATA_MBLK) { 875 return (CRYPTO_ARGUMENTS_BAD); 876 } 877 878 if (aes_ctx->ac_flags & CTR_MODE) { 879 if (aes_ctx->ac_remainder_len > 0) { 880 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data, 881 aes_encrypt_block); 882 if (ret != CRYPTO_SUCCESS) 883 return (ret); 884 } 885 } else if (aes_ctx->ac_flags & CCM_MODE) { 886 ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data, 887 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block); 888 if (ret != CRYPTO_SUCCESS) { 889 return (ret); 890 } 891 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) { 892 size_t saved_offset = data->cd_offset; 893 894 ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data, 895 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 896 aes_xor_block); 897 if (ret != CRYPTO_SUCCESS) { 898 return (ret); 899 } 900 data->cd_length = data->cd_offset - saved_offset; 901 data->cd_offset = saved_offset; 902 } else { 903 /* 904 * There must be no unprocessed plaintext. 905 * This happens if the length of the last data is 906 * not a multiple of the AES block length. 
907 */ 908 if (aes_ctx->ac_remainder_len > 0) { 909 return (CRYPTO_DATA_LEN_RANGE); 910 } 911 data->cd_length = 0; 912 } 913 914 (void) aes_free_context(ctx); 915 916 /* EXPORT DELETE END */ 917 918 return (CRYPTO_SUCCESS); 919 } 920 921 /* ARGSUSED */ 922 static int 923 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data, 924 crypto_req_handle_t req) 925 { 926 927 /* EXPORT DELETE START */ 928 929 aes_ctx_t *aes_ctx; 930 int ret; 931 off_t saved_offset; 932 size_t saved_length; 933 934 ASSERT(ctx->cc_provider_private != NULL); 935 aes_ctx = ctx->cc_provider_private; 936 937 if (data->cd_format != CRYPTO_DATA_RAW && 938 data->cd_format != CRYPTO_DATA_UIO && 939 data->cd_format != CRYPTO_DATA_MBLK) { 940 return (CRYPTO_ARGUMENTS_BAD); 941 } 942 943 /* 944 * There must be no unprocessed ciphertext. 945 * This happens if the length of the last ciphertext is 946 * not a multiple of the AES block length. 947 */ 948 if (aes_ctx->ac_remainder_len > 0) { 949 if ((aes_ctx->ac_flags & CTR_MODE) == 0) 950 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE); 951 else { 952 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data, 953 aes_encrypt_block); 954 if (ret == CRYPTO_DATA_LEN_RANGE) 955 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE; 956 if (ret != CRYPTO_SUCCESS) 957 return (ret); 958 } 959 } 960 961 if (aes_ctx->ac_flags & CCM_MODE) { 962 /* 963 * This is where all the plaintext is returned, make sure 964 * the plaintext buffer is big enough 965 */ 966 size_t pt_len = aes_ctx->ac_data_len; 967 if (data->cd_length < pt_len) { 968 data->cd_length = pt_len; 969 return (CRYPTO_BUFFER_TOO_SMALL); 970 } 971 972 ASSERT(aes_ctx->ac_processed_data_len == pt_len); 973 ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len); 974 saved_offset = data->cd_offset; 975 saved_length = data->cd_length; 976 ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data, 977 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 978 aes_xor_block); 979 if (ret == CRYPTO_SUCCESS) { 980 data->cd_length = data->cd_offset - 
saved_offset; 981 } else { 982 data->cd_length = saved_length; 983 } 984 985 data->cd_offset = saved_offset; 986 if (ret != CRYPTO_SUCCESS) { 987 return (ret); 988 } 989 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) { 990 /* 991 * This is where all the plaintext is returned, make sure 992 * the plaintext buffer is big enough 993 */ 994 gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx; 995 size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len; 996 997 if (data->cd_length < pt_len) { 998 data->cd_length = pt_len; 999 return (CRYPTO_BUFFER_TOO_SMALL); 1000 } 1001 1002 saved_offset = data->cd_offset; 1003 saved_length = data->cd_length; 1004 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data, 1005 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block); 1006 if (ret == CRYPTO_SUCCESS) { 1007 data->cd_length = data->cd_offset - saved_offset; 1008 } else { 1009 data->cd_length = saved_length; 1010 } 1011 1012 data->cd_offset = saved_offset; 1013 if (ret != CRYPTO_SUCCESS) { 1014 return (ret); 1015 } 1016 } 1017 1018 1019 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) { 1020 data->cd_length = 0; 1021 } 1022 1023 (void) aes_free_context(ctx); 1024 1025 /* EXPORT DELETE END */ 1026 1027 return (CRYPTO_SUCCESS); 1028 } 1029 1030 /* ARGSUSED */ 1031 static int 1032 aes_encrypt_atomic(crypto_provider_handle_t provider, 1033 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1034 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext, 1035 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1036 { 1037 aes_ctx_t aes_ctx; /* on the stack */ 1038 off_t saved_offset; 1039 size_t saved_length; 1040 size_t length_needed; 1041 int ret; 1042 1043 AES_ARG_INPLACE(plaintext, ciphertext); 1044 1045 /* 1046 * CTR, CCM, GCM, and GMAC modes do not require that plaintext 1047 * be a multiple of AES block size. 
1048 */ 1049 switch (mechanism->cm_type) { 1050 case AES_CTR_MECH_INFO_TYPE: 1051 case AES_CCM_MECH_INFO_TYPE: 1052 case AES_GCM_MECH_INFO_TYPE: 1053 case AES_GMAC_MECH_INFO_TYPE: 1054 break; 1055 default: 1056 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0) 1057 return (CRYPTO_DATA_LEN_RANGE); 1058 } 1059 1060 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS) 1061 return (ret); 1062 1063 bzero(&aes_ctx, sizeof (aes_ctx_t)); 1064 1065 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key, 1066 crypto_kmflag(req), B_TRUE); 1067 if (ret != CRYPTO_SUCCESS) 1068 return (ret); 1069 1070 switch (mechanism->cm_type) { 1071 case AES_CCM_MECH_INFO_TYPE: 1072 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len; 1073 break; 1074 case AES_GMAC_MECH_INFO_TYPE: 1075 if (plaintext->cd_length != 0) 1076 return (CRYPTO_ARGUMENTS_BAD); 1077 /* FALLTHRU */ 1078 case AES_GCM_MECH_INFO_TYPE: 1079 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len; 1080 break; 1081 default: 1082 length_needed = plaintext->cd_length; 1083 } 1084 1085 /* return size of buffer needed to store output */ 1086 if (ciphertext->cd_length < length_needed) { 1087 ciphertext->cd_length = length_needed; 1088 ret = CRYPTO_BUFFER_TOO_SMALL; 1089 goto out; 1090 } 1091 1092 saved_offset = ciphertext->cd_offset; 1093 saved_length = ciphertext->cd_length; 1094 1095 /* 1096 * Do an update on the specified input data. 
1097 */ 1098 switch (plaintext->cd_format) { 1099 case CRYPTO_DATA_RAW: 1100 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext, 1101 aes_encrypt_contiguous_blocks, aes_copy_block64); 1102 break; 1103 case CRYPTO_DATA_UIO: 1104 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext, 1105 aes_encrypt_contiguous_blocks, aes_copy_block64); 1106 break; 1107 case CRYPTO_DATA_MBLK: 1108 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext, 1109 aes_encrypt_contiguous_blocks, aes_copy_block64); 1110 break; 1111 default: 1112 ret = CRYPTO_ARGUMENTS_BAD; 1113 } 1114 1115 if (ret == CRYPTO_SUCCESS) { 1116 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) { 1117 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx, 1118 ciphertext, AES_BLOCK_LEN, aes_encrypt_block, 1119 aes_xor_block); 1120 if (ret != CRYPTO_SUCCESS) 1121 goto out; 1122 ASSERT(aes_ctx.ac_remainder_len == 0); 1123 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE || 1124 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) { 1125 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx, 1126 ciphertext, AES_BLOCK_LEN, aes_encrypt_block, 1127 aes_copy_block, aes_xor_block); 1128 if (ret != CRYPTO_SUCCESS) 1129 goto out; 1130 ASSERT(aes_ctx.ac_remainder_len == 0); 1131 } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) { 1132 if (aes_ctx.ac_remainder_len > 0) { 1133 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx, 1134 ciphertext, aes_encrypt_block); 1135 if (ret != CRYPTO_SUCCESS) 1136 goto out; 1137 } 1138 } else { 1139 ASSERT(aes_ctx.ac_remainder_len == 0); 1140 } 1141 1142 if (plaintext != ciphertext) { 1143 ciphertext->cd_length = 1144 ciphertext->cd_offset - saved_offset; 1145 } 1146 } else { 1147 ciphertext->cd_length = saved_length; 1148 } 1149 ciphertext->cd_offset = saved_offset; 1150 1151 out: 1152 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1153 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1154 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1155 } 1156 1157 return (ret); 1158 } 1159 
1160 /* ARGSUSED */ 1161 static int 1162 aes_decrypt_atomic(crypto_provider_handle_t provider, 1163 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1164 crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext, 1165 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1166 { 1167 aes_ctx_t aes_ctx; /* on the stack */ 1168 off_t saved_offset; 1169 size_t saved_length; 1170 size_t length_needed; 1171 int ret; 1172 1173 AES_ARG_INPLACE(ciphertext, plaintext); 1174 1175 /* 1176 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext 1177 * be a multiple of AES block size. 1178 */ 1179 switch (mechanism->cm_type) { 1180 case AES_CTR_MECH_INFO_TYPE: 1181 case AES_CCM_MECH_INFO_TYPE: 1182 case AES_GCM_MECH_INFO_TYPE: 1183 case AES_GMAC_MECH_INFO_TYPE: 1184 break; 1185 default: 1186 if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) 1187 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE); 1188 } 1189 1190 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS) 1191 return (ret); 1192 1193 bzero(&aes_ctx, sizeof (aes_ctx_t)); 1194 1195 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key, 1196 crypto_kmflag(req), B_FALSE); 1197 if (ret != CRYPTO_SUCCESS) 1198 return (ret); 1199 1200 switch (mechanism->cm_type) { 1201 case AES_CCM_MECH_INFO_TYPE: 1202 length_needed = aes_ctx.ac_data_len; 1203 break; 1204 case AES_GCM_MECH_INFO_TYPE: 1205 length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len; 1206 break; 1207 case AES_GMAC_MECH_INFO_TYPE: 1208 if (plaintext->cd_length != 0) 1209 return (CRYPTO_ARGUMENTS_BAD); 1210 length_needed = 0; 1211 break; 1212 default: 1213 length_needed = ciphertext->cd_length; 1214 } 1215 1216 /* return size of buffer needed to store output */ 1217 if (plaintext->cd_length < length_needed) { 1218 plaintext->cd_length = length_needed; 1219 ret = CRYPTO_BUFFER_TOO_SMALL; 1220 goto out; 1221 } 1222 1223 saved_offset = plaintext->cd_offset; 1224 saved_length = plaintext->cd_length; 1225 
1226 if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE || 1227 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) 1228 gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req)); 1229 1230 /* 1231 * Do an update on the specified input data. 1232 */ 1233 switch (ciphertext->cd_format) { 1234 case CRYPTO_DATA_RAW: 1235 ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext, 1236 aes_decrypt_contiguous_blocks, aes_copy_block64); 1237 break; 1238 case CRYPTO_DATA_UIO: 1239 ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext, 1240 aes_decrypt_contiguous_blocks, aes_copy_block64); 1241 break; 1242 case CRYPTO_DATA_MBLK: 1243 ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext, 1244 aes_decrypt_contiguous_blocks, aes_copy_block64); 1245 break; 1246 default: 1247 ret = CRYPTO_ARGUMENTS_BAD; 1248 } 1249 1250 if (ret == CRYPTO_SUCCESS) { 1251 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) { 1252 ASSERT(aes_ctx.ac_processed_data_len 1253 == aes_ctx.ac_data_len); 1254 ASSERT(aes_ctx.ac_processed_mac_len 1255 == aes_ctx.ac_mac_len); 1256 ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx, 1257 plaintext, AES_BLOCK_LEN, aes_encrypt_block, 1258 aes_copy_block, aes_xor_block); 1259 ASSERT(aes_ctx.ac_remainder_len == 0); 1260 if ((ret == CRYPTO_SUCCESS) && 1261 (ciphertext != plaintext)) { 1262 plaintext->cd_length = 1263 plaintext->cd_offset - saved_offset; 1264 } else { 1265 plaintext->cd_length = saved_length; 1266 } 1267 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE || 1268 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) { 1269 ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx, 1270 plaintext, AES_BLOCK_LEN, aes_encrypt_block, 1271 aes_xor_block); 1272 ASSERT(aes_ctx.ac_remainder_len == 0); 1273 if ((ret == CRYPTO_SUCCESS) && 1274 (ciphertext != plaintext)) { 1275 plaintext->cd_length = 1276 plaintext->cd_offset - saved_offset; 1277 } else { 1278 plaintext->cd_length = saved_length; 1279 } 1280 } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) { 1281 
ASSERT(aes_ctx.ac_remainder_len == 0); 1282 if (ciphertext != plaintext) 1283 plaintext->cd_length = 1284 plaintext->cd_offset - saved_offset; 1285 } else { 1286 if (aes_ctx.ac_remainder_len > 0) { 1287 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx, 1288 plaintext, aes_encrypt_block); 1289 if (ret == CRYPTO_DATA_LEN_RANGE) 1290 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE; 1291 if (ret != CRYPTO_SUCCESS) 1292 goto out; 1293 } 1294 if (ciphertext != plaintext) 1295 plaintext->cd_length = 1296 plaintext->cd_offset - saved_offset; 1297 } 1298 } else { 1299 plaintext->cd_length = saved_length; 1300 } 1301 plaintext->cd_offset = saved_offset; 1302 1303 out: 1304 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1305 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1306 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1307 } 1308 1309 if (aes_ctx.ac_flags & CCM_MODE) { 1310 if (aes_ctx.ac_pt_buf != NULL) { 1311 kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len); 1312 } 1313 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) { 1314 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) { 1315 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf, 1316 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len); 1317 } 1318 } 1319 1320 return (ret); 1321 } 1322 1323 /* 1324 * KCF software provider context template entry points. 
1325 */ 1326 /* ARGSUSED */ 1327 static int 1328 aes_create_ctx_template(crypto_provider_handle_t provider, 1329 crypto_mechanism_t *mechanism, crypto_key_t *key, 1330 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req) 1331 { 1332 1333 /* EXPORT DELETE START */ 1334 1335 void *keysched; 1336 size_t size; 1337 int rv; 1338 1339 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE && 1340 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE && 1341 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE && 1342 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE && 1343 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE && 1344 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE) 1345 return (CRYPTO_MECHANISM_INVALID); 1346 1347 if ((keysched = aes_alloc_keysched(&size, 1348 crypto_kmflag(req))) == NULL) { 1349 return (CRYPTO_HOST_MEMORY); 1350 } 1351 1352 /* 1353 * Initialize key schedule. Key length information is stored 1354 * in the key. 1355 */ 1356 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) { 1357 bzero(keysched, size); 1358 kmem_free(keysched, size); 1359 return (rv); 1360 } 1361 1362 *tmpl = keysched; 1363 *tmpl_size = size; 1364 1365 /* EXPORT DELETE END */ 1366 1367 return (CRYPTO_SUCCESS); 1368 } 1369 1370 /* ARGSUSED */ 1371 static int 1372 aes_free_context(crypto_ctx_t *ctx) 1373 { 1374 1375 /* EXPORT DELETE START */ 1376 1377 aes_ctx_t *aes_ctx = ctx->cc_provider_private; 1378 1379 if (aes_ctx != NULL) { 1380 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1381 ASSERT(aes_ctx->ac_keysched_len != 0); 1382 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len); 1383 kmem_free(aes_ctx->ac_keysched, 1384 aes_ctx->ac_keysched_len); 1385 } 1386 crypto_free_mode_ctx(aes_ctx); 1387 ctx->cc_provider_private = NULL; 1388 } 1389 1390 /* EXPORT DELETE END */ 1391 1392 return (CRYPTO_SUCCESS); 1393 } 1394 1395 /* ARGSUSED */ 1396 static int 1397 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template, 1398 crypto_mechanism_t *mechanism, 
crypto_key_t *key, int kmflag, 1399 boolean_t is_encrypt_init) 1400 { 1401 int rv = CRYPTO_SUCCESS; 1402 1403 /* EXPORT DELETE START */ 1404 1405 void *keysched; 1406 size_t size; 1407 1408 if (template == NULL) { 1409 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL) 1410 return (CRYPTO_HOST_MEMORY); 1411 /* 1412 * Initialize key schedule. 1413 * Key length is stored in the key. 1414 */ 1415 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) { 1416 kmem_free(keysched, size); 1417 return (rv); 1418 } 1419 1420 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE; 1421 aes_ctx->ac_keysched_len = size; 1422 } else { 1423 keysched = template; 1424 } 1425 aes_ctx->ac_keysched = keysched; 1426 1427 switch (mechanism->cm_type) { 1428 case AES_CBC_MECH_INFO_TYPE: 1429 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param, 1430 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64); 1431 break; 1432 case AES_CTR_MECH_INFO_TYPE: { 1433 CK_AES_CTR_PARAMS *pp; 1434 1435 if (mechanism->cm_param == NULL || 1436 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) { 1437 return (CRYPTO_MECHANISM_PARAM_INVALID); 1438 } 1439 pp = (CK_AES_CTR_PARAMS *)mechanism->cm_param; 1440 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits, 1441 pp->cb, aes_copy_block); 1442 break; 1443 } 1444 case AES_CCM_MECH_INFO_TYPE: 1445 if (mechanism->cm_param == NULL || 1446 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) { 1447 return (CRYPTO_MECHANISM_PARAM_INVALID); 1448 } 1449 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param, 1450 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block, 1451 aes_xor_block); 1452 break; 1453 case AES_GCM_MECH_INFO_TYPE: 1454 if (mechanism->cm_param == NULL || 1455 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) { 1456 return (CRYPTO_MECHANISM_PARAM_INVALID); 1457 } 1458 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param, 1459 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 1460 aes_xor_block); 1461 
break; 1462 case AES_GMAC_MECH_INFO_TYPE: 1463 if (mechanism->cm_param == NULL || 1464 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) { 1465 return (CRYPTO_MECHANISM_PARAM_INVALID); 1466 } 1467 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param, 1468 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 1469 aes_xor_block); 1470 break; 1471 case AES_ECB_MECH_INFO_TYPE: 1472 aes_ctx->ac_flags |= ECB_MODE; 1473 } 1474 1475 if (rv != CRYPTO_SUCCESS) { 1476 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1477 bzero(keysched, size); 1478 kmem_free(keysched, size); 1479 } 1480 } 1481 1482 /* EXPORT DELETE END */ 1483 1484 return (rv); 1485 } 1486 1487 static int 1488 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data, 1489 CK_AES_GCM_PARAMS *gcm_params) 1490 { 1491 /* LINTED: pointer alignment */ 1492 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param; 1493 1494 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE) 1495 return (CRYPTO_MECHANISM_INVALID); 1496 1497 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) 1498 return (CRYPTO_MECHANISM_PARAM_INVALID); 1499 1500 if (params->pIv == NULL) 1501 return (CRYPTO_MECHANISM_PARAM_INVALID); 1502 1503 gcm_params->pIv = params->pIv; 1504 gcm_params->ulIvLen = AES_GMAC_IV_LEN; 1505 gcm_params->ulTagBits = AES_GMAC_TAG_BITS; 1506 1507 if (data == NULL) 1508 return (CRYPTO_SUCCESS); 1509 1510 if (data->cd_format != CRYPTO_DATA_RAW) 1511 return (CRYPTO_ARGUMENTS_BAD); 1512 1513 gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base; 1514 gcm_params->ulAADLen = data->cd_length; 1515 return (CRYPTO_SUCCESS); 1516 } 1517 1518 static int 1519 aes_mac_atomic(crypto_provider_handle_t provider, 1520 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1521 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac, 1522 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1523 { 1524 CK_AES_GCM_PARAMS gcm_params; 1525 crypto_mechanism_t gcm_mech; 1526 int rv; 1527 1528 if ((rv 
	    = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	/* Present GMAC to the GCM code as AES-GCM with fixed IV/tag sizes. */
	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	/* Empty plaintext + AAD: the emitted GCM tag is the GMAC. */
	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
	    key, &null_crypto_data, mac, template, req));
}

/*
 * Verify a GMAC over `data': recast as an AES-GCM decryption of an
 * empty message whose tag is `mac'; the GCM decrypt path performs the
 * tag comparison.
 */
static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	/* Present GMAC to the GCM code as AES-GCM with fixed IV/tag sizes. */
	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	/* `mac' is consumed as the ciphertext+tag of an empty message. */
	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
	    key, mac, &null_crypto_data, template, req));
}