1 /* 2 * CDDL HEADER START 3 * 4 * The contents of this file are subject to the terms of the 5 * Common Development and Distribution License (the "License"). 6 * You may not use this file except in compliance with the License. 7 * 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE 9 * or http://www.opensolaris.org/os/licensing. 10 * See the License for the specific language governing permissions 11 * and limitations under the License. 12 * 13 * When distributing Covered Code, include this CDDL HEADER in each 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE. 15 * If applicable, add the following below this CDDL HEADER, with the 16 * fields enclosed by brackets "[]" replaced with your own identifying 17 * information: Portions Copyright [yyyy] [name of copyright owner] 18 * 19 * CDDL HEADER END 20 */ 21 /* 22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved. 23 * Copyright 2017 Nexenta Systems, Inc. All rights reserved. 24 * Copyright 2018, Joyent, Inc. 25 */ 26 27 /* 28 * AES provider for the Kernel Cryptographic Framework (KCF) 29 */ 30 31 #include <sys/types.h> 32 #include <sys/systm.h> 33 #include <sys/modctl.h> 34 #include <sys/cmn_err.h> 35 #include <sys/ddi.h> 36 #include <sys/crypto/common.h> 37 #include <sys/crypto/impl.h> 38 #include <sys/crypto/spi.h> 39 #include <sys/sysmacros.h> 40 #include <sys/strsun.h> 41 #include <modes/modes.h> 42 #define _AES_IMPL 43 #include <aes/aes_impl.h> 44 45 extern struct mod_ops mod_cryptoops; 46 47 /* 48 * Module linkage information for the kernel. 49 */ 50 static struct modlcrypto modlcrypto = { 51 &mod_cryptoops, 52 "AES Kernel SW Provider" 53 }; 54 55 static struct modlinkage modlinkage = { 56 MODREV_1, 57 (void *)&modlcrypto, 58 NULL 59 }; 60 61 /* 62 * Mechanism info structure passed to KCF during registration. 
63 */ 64 static crypto_mech_info_t aes_mech_info_tab[] = { 65 /* AES_ECB */ 66 {SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE, 67 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 68 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 69 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 70 /* AES_CBC */ 71 {SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE, 72 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 73 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 74 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 75 /* AES_CMAC */ 76 {SUN_CKM_AES_CMAC, AES_CMAC_MECH_INFO_TYPE, 77 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 78 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC | 79 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 80 /* AES_CTR */ 81 {SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE, 82 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 83 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 84 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 85 /* AES_CCM */ 86 {SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE, 87 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 88 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 89 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 90 /* AES_GCM */ 91 {SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE, 92 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 93 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC, 94 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}, 95 /* AES_GMAC */ 96 {SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE, 97 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC | 98 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC | 99 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC | 100 CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC | 101 CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC, 102 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES} 103 }; 104 105 /* operations are in-place if the output buffer is NULL */ 106 #define AES_ARG_INPLACE(input, output) \ 107 if ((output) == NULL) \ 108 (output) = (input); 109 
static void aes_provider_status(crypto_provider_handle_t, uint_t *);

/* KCF control entry points: only provider-status reporting. */
static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

/* KCF cipher entry points; slot order is dictated by crypto_cipher_ops_t. */
static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};

static int aes_mac_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_mac_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_mac_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

/* KCF MAC entry points (used by the CMAC/GMAC mechanisms). */
static crypto_mac_ops_t aes_mac_ops = {
	aes_mac_init,
	aes_mac,
	aes_mac_update,
	aes_mac_final,
	aes_mac_atomic,
	aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

/* KCF context-management entry points. */
static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};

/* Operation vector handed to KCF; op families we don't provide are NULL. */
static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops,
	NULL,
	NULL,
	NULL,
};

/* Provider description passed to crypto_register_provider() in _init(). */
static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_4,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

/* Handle returned by KCF on registration; 0 means "not registered". */
static crypto_kcf_provider_handle_t aes_prov_handle = 0;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
_init(void)
{
	int ret;

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF. If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}

int
_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != 0) {
		/* EBUSY: KCF still has outstanding references to us. */
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (mod_remove(&modlinkage));
}

int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}


/*
 * Validate the mechanism parameter length for the given AES mechanism and,
 * when ctx != NULL, allocate the matching mode-specific context via the
 * per-mode *_alloc_ctx() constructor (using kmflag for the allocation).
 * Returns CRYPTO_MECHANISM_INVALID for unknown mechanisms and
 * CRYPTO_MECHANISM_PARAM_INVALID for a bad parameter length.
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		/* ECB and CMAC take no parameter; param_len is left unset. */
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		/* CBC parameter is the IV: one AES block. */
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CMAC_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = cmac_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		/*
		 * NOTE(review): *ctx may be set to NULL on allocation
		 * failure; callers appear to rely on downstream checks —
		 * confirm against aes_common_init().
		 */
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* Software provider: always ready. */
	*status = CRYPTO_PROVIDER_READY;
}

/* Thin wrapper: common init with is_encrypt_init = B_TRUE. */
static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

/* Thin wrapper: common init with is_encrypt_init = B_FALSE. */
static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}



/*
 * KCF software provider encrypt entry points.
 */

/*
 * Shared body of aes_{en,de}crypt_init(): validate the key format and
 * mechanism parameter, allocate a mode context, initialize it, and attach
 * it to the KCF context.  On failure the half-built context is freed.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

/*
 * Single-part encrypt: size-check the output buffer, run one update over
 * the full input, then the mode-specific final, and free the context.
 * A CRYPTO_BUFFER_TOO_SMALL return leaves the context intact so the
 * caller can retry with a larger buffer.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CMAC_MODE|CTR_MODE|CCM_MODE|
	    GCM_MODE|GMAC_MODE)) == 0) &&
	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	switch (aes_ctx->ac_flags & (CMAC_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		/* ciphertext carries the input plus the appended MAC */
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		/* ciphertext carries the input plus the appended tag */
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case CMAC_MODE:
		/* output is the MAC only: exactly one block */
		length_needed = AES_BLOCK_LEN;
		break;
	case GMAC_MODE:
		/* GMAC authenticates AAD only; no plaintext is accepted */
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		/* report the required size back through cd_length */
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & CMAC_MODE) {
		/* cmac_update doesn't store data */
		ciphertext->cd_length = saved_length;
		ret = cmac_mode_final((cbc_ctx_t *)aes_ctx, ciphertext,
		    aes_encrypt_block, aes_xor_block);
		aes_ctx->ac_remainder_len = 0;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}


/*
 * Single-part decrypt: mirror of aes_encrypt().  Size-checks the plaintext
 * buffer, runs one update, then the mode-specific final, and frees the
 * context (except on CRYPTO_BUFFER_TOO_SMALL, which must be retryable).
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM: plaintext is MAC len smaller than cipher text
	 * GCM: plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		/* report the required size back through cd_length */
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/* all input must have been consumed by the update */
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* restore caller's view of the buffer on failure */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* restore caller's view of the buffer on failure */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}


/*
 * Multi-part encrypt update: process as many whole AES blocks as the
 * accumulated input allows; any remainder is buffered in the context.
 */
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/*
	 * return length needed to store the output.
	 * CMAC stores its output in a local buffer until *_final.
	 */
	if ((aes_ctx->ac_flags & CMAC_MODE) == 0 &&
	    ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		/* report bytes produced; in-place ops keep cd_length as-is */
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}


/*
 * Multi-part decrypt update: mirror of aes_encrypt_update().  For CCM,
 * GCM, and GMAC, ciphertext is only accumulated here; plaintext is
 * produced by the final routine once the tag can be verified.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM/GMAC buffer ciphertext internally; tell them how to allocate */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		/* translate to the decrypt-side error code */
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}

/*
 * Multi-part encrypt final: flush mode-specific state (CTR remainder,
 * CCM/GCM tag, CMAC MAC) into 'data' and free the context.
 */
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & CMAC_MODE) {
		ret = cmac_mode_final((cbc_ctx_t *)aes_ctx, data,
		    aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS)
			return (ret);
		/* CMAC output is exactly one AES block */
		data->cd_length =
AES_BLOCK_LEN; 861 } else { 862 /* 863 * There must be no unprocessed plaintext. 864 * This happens if the length of the last data is 865 * not a multiple of the AES block length. 866 */ 867 if (aes_ctx->ac_remainder_len > 0) { 868 return (CRYPTO_DATA_LEN_RANGE); 869 } 870 data->cd_length = 0; 871 } 872 873 (void) aes_free_context(ctx); 874 875 return (CRYPTO_SUCCESS); 876 } 877 878 /* ARGSUSED */ 879 static int 880 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data, 881 crypto_req_handle_t req) 882 { 883 aes_ctx_t *aes_ctx; 884 int ret; 885 off_t saved_offset; 886 size_t saved_length; 887 888 ASSERT(ctx->cc_provider_private != NULL); 889 aes_ctx = ctx->cc_provider_private; 890 891 if (data->cd_format != CRYPTO_DATA_RAW && 892 data->cd_format != CRYPTO_DATA_UIO && 893 data->cd_format != CRYPTO_DATA_MBLK) { 894 return (CRYPTO_ARGUMENTS_BAD); 895 } 896 897 /* 898 * There must be no unprocessed ciphertext. 899 * This happens if the length of the last ciphertext is 900 * not a multiple of the AES block length. 
901 */ 902 if (aes_ctx->ac_remainder_len > 0) { 903 if ((aes_ctx->ac_flags & CTR_MODE) == 0) 904 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE); 905 else { 906 ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data, 907 aes_encrypt_block); 908 if (ret == CRYPTO_DATA_LEN_RANGE) 909 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE; 910 if (ret != CRYPTO_SUCCESS) 911 return (ret); 912 } 913 } 914 915 if (aes_ctx->ac_flags & CCM_MODE) { 916 /* 917 * This is where all the plaintext is returned, make sure 918 * the plaintext buffer is big enough 919 */ 920 size_t pt_len = aes_ctx->ac_data_len; 921 if (data->cd_length < pt_len) { 922 data->cd_length = pt_len; 923 return (CRYPTO_BUFFER_TOO_SMALL); 924 } 925 926 ASSERT(aes_ctx->ac_processed_data_len == pt_len); 927 ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len); 928 saved_offset = data->cd_offset; 929 saved_length = data->cd_length; 930 ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data, 931 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 932 aes_xor_block); 933 if (ret == CRYPTO_SUCCESS) { 934 data->cd_length = data->cd_offset - saved_offset; 935 } else { 936 data->cd_length = saved_length; 937 } 938 939 data->cd_offset = saved_offset; 940 if (ret != CRYPTO_SUCCESS) { 941 return (ret); 942 } 943 } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) { 944 /* 945 * This is where all the plaintext is returned, make sure 946 * the plaintext buffer is big enough 947 */ 948 gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx; 949 size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len; 950 951 if (data->cd_length < pt_len) { 952 data->cd_length = pt_len; 953 return (CRYPTO_BUFFER_TOO_SMALL); 954 } 955 956 saved_offset = data->cd_offset; 957 saved_length = data->cd_length; 958 ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data, 959 AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block); 960 if (ret == CRYPTO_SUCCESS) { 961 data->cd_length = data->cd_offset - saved_offset; 962 } else { 963 data->cd_length = saved_length; 964 } 965 966 data->cd_offset = 
saved_offset; 967 if (ret != CRYPTO_SUCCESS) { 968 return (ret); 969 } 970 } 971 972 973 if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) { 974 data->cd_length = 0; 975 } 976 977 (void) aes_free_context(ctx); 978 979 return (CRYPTO_SUCCESS); 980 } 981 982 /* ARGSUSED */ 983 static int 984 aes_encrypt_atomic(crypto_provider_handle_t provider, 985 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 986 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext, 987 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 988 { 989 aes_ctx_t aes_ctx; /* on the stack */ 990 off_t saved_offset; 991 size_t saved_length; 992 size_t length_needed; 993 int ret; 994 995 AES_ARG_INPLACE(plaintext, ciphertext); 996 997 /* 998 * CTR, CCM, CMAC, GCM, and GMAC modes do not require that plaintext 999 * be a multiple of AES block size. 1000 */ 1001 switch (mechanism->cm_type) { 1002 case AES_CTR_MECH_INFO_TYPE: 1003 case AES_CCM_MECH_INFO_TYPE: 1004 case AES_GCM_MECH_INFO_TYPE: 1005 case AES_GMAC_MECH_INFO_TYPE: 1006 case AES_CMAC_MECH_INFO_TYPE: 1007 break; 1008 default: 1009 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0) 1010 return (CRYPTO_DATA_LEN_RANGE); 1011 } 1012 1013 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS) 1014 return (ret); 1015 1016 bzero(&aes_ctx, sizeof (aes_ctx_t)); 1017 1018 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key, 1019 crypto_kmflag(req), B_TRUE); 1020 if (ret != CRYPTO_SUCCESS) 1021 return (ret); 1022 1023 switch (mechanism->cm_type) { 1024 case AES_CCM_MECH_INFO_TYPE: 1025 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len; 1026 break; 1027 case AES_GMAC_MECH_INFO_TYPE: 1028 if (plaintext->cd_length != 0) 1029 return (CRYPTO_ARGUMENTS_BAD); 1030 /* FALLTHRU */ 1031 case AES_GCM_MECH_INFO_TYPE: 1032 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len; 1033 break; 1034 case AES_CMAC_MECH_INFO_TYPE: 1035 length_needed = AES_BLOCK_LEN; 1036 break; 
1037 default: 1038 length_needed = plaintext->cd_length; 1039 } 1040 1041 /* return size of buffer needed to store output */ 1042 if (ciphertext->cd_length < length_needed) { 1043 ciphertext->cd_length = length_needed; 1044 ret = CRYPTO_BUFFER_TOO_SMALL; 1045 goto out; 1046 } 1047 1048 saved_offset = ciphertext->cd_offset; 1049 saved_length = ciphertext->cd_length; 1050 1051 /* 1052 * Do an update on the specified input data. 1053 */ 1054 switch (plaintext->cd_format) { 1055 case CRYPTO_DATA_RAW: 1056 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext, 1057 aes_encrypt_contiguous_blocks, aes_copy_block64); 1058 break; 1059 case CRYPTO_DATA_UIO: 1060 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext, 1061 aes_encrypt_contiguous_blocks, aes_copy_block64); 1062 break; 1063 case CRYPTO_DATA_MBLK: 1064 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext, 1065 aes_encrypt_contiguous_blocks, aes_copy_block64); 1066 break; 1067 default: 1068 ret = CRYPTO_ARGUMENTS_BAD; 1069 } 1070 1071 if (ret == CRYPTO_SUCCESS) { 1072 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) { 1073 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx, 1074 ciphertext, AES_BLOCK_LEN, aes_encrypt_block, 1075 aes_xor_block); 1076 if (ret != CRYPTO_SUCCESS) 1077 goto out; 1078 ASSERT(aes_ctx.ac_remainder_len == 0); 1079 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE || 1080 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) { 1081 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx, 1082 ciphertext, AES_BLOCK_LEN, aes_encrypt_block, 1083 aes_copy_block, aes_xor_block); 1084 if (ret != CRYPTO_SUCCESS) 1085 goto out; 1086 ASSERT(aes_ctx.ac_remainder_len == 0); 1087 } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) { 1088 if (aes_ctx.ac_remainder_len > 0) { 1089 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx, 1090 ciphertext, aes_encrypt_block); 1091 if (ret != CRYPTO_SUCCESS) 1092 goto out; 1093 } 1094 } else if (mechanism->cm_type == AES_CMAC_MECH_INFO_TYPE) { 1095 ret = 
cmac_mode_final((cbc_ctx_t *)&aes_ctx, 1096 ciphertext, aes_encrypt_block, 1097 aes_xor_block); 1098 if (ret != CRYPTO_SUCCESS) 1099 goto out; 1100 } else { 1101 ASSERT(aes_ctx.ac_remainder_len == 0); 1102 } 1103 1104 if (plaintext != ciphertext) { 1105 ciphertext->cd_length = 1106 ciphertext->cd_offset - saved_offset; 1107 } 1108 } else { 1109 ciphertext->cd_length = saved_length; 1110 } 1111 ciphertext->cd_offset = saved_offset; 1112 1113 out: 1114 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1115 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1116 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1117 } 1118 1119 return (ret); 1120 } 1121 1122 /* ARGSUSED */ 1123 static int 1124 aes_decrypt_atomic(crypto_provider_handle_t provider, 1125 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1126 crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext, 1127 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1128 { 1129 aes_ctx_t aes_ctx; /* on the stack */ 1130 off_t saved_offset; 1131 size_t saved_length; 1132 size_t length_needed; 1133 int ret; 1134 1135 AES_ARG_INPLACE(ciphertext, plaintext); 1136 1137 /* 1138 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext 1139 * be a multiple of AES block size. 
1140 */ 1141 switch (mechanism->cm_type) { 1142 case AES_CTR_MECH_INFO_TYPE: 1143 case AES_CCM_MECH_INFO_TYPE: 1144 case AES_GCM_MECH_INFO_TYPE: 1145 case AES_GMAC_MECH_INFO_TYPE: 1146 break; 1147 default: 1148 if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) 1149 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE); 1150 } 1151 1152 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS) 1153 return (ret); 1154 1155 bzero(&aes_ctx, sizeof (aes_ctx_t)); 1156 1157 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key, 1158 crypto_kmflag(req), B_FALSE); 1159 if (ret != CRYPTO_SUCCESS) 1160 return (ret); 1161 1162 switch (mechanism->cm_type) { 1163 case AES_CCM_MECH_INFO_TYPE: 1164 length_needed = aes_ctx.ac_data_len; 1165 break; 1166 case AES_GCM_MECH_INFO_TYPE: 1167 length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len; 1168 break; 1169 case AES_GMAC_MECH_INFO_TYPE: 1170 if (plaintext->cd_length != 0) 1171 return (CRYPTO_ARGUMENTS_BAD); 1172 length_needed = 0; 1173 break; 1174 default: 1175 length_needed = ciphertext->cd_length; 1176 } 1177 1178 /* return size of buffer needed to store output */ 1179 if (plaintext->cd_length < length_needed) { 1180 plaintext->cd_length = length_needed; 1181 ret = CRYPTO_BUFFER_TOO_SMALL; 1182 goto out; 1183 } 1184 1185 saved_offset = plaintext->cd_offset; 1186 saved_length = plaintext->cd_length; 1187 1188 if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE || 1189 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) 1190 gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req)); 1191 1192 /* 1193 * Do an update on the specified input data. 
1194 */ 1195 switch (ciphertext->cd_format) { 1196 case CRYPTO_DATA_RAW: 1197 ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext, 1198 aes_decrypt_contiguous_blocks, aes_copy_block64); 1199 break; 1200 case CRYPTO_DATA_UIO: 1201 ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext, 1202 aes_decrypt_contiguous_blocks, aes_copy_block64); 1203 break; 1204 case CRYPTO_DATA_MBLK: 1205 ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext, 1206 aes_decrypt_contiguous_blocks, aes_copy_block64); 1207 break; 1208 default: 1209 ret = CRYPTO_ARGUMENTS_BAD; 1210 } 1211 1212 if (ret == CRYPTO_SUCCESS) { 1213 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) { 1214 ASSERT(aes_ctx.ac_processed_data_len 1215 == aes_ctx.ac_data_len); 1216 ASSERT(aes_ctx.ac_processed_mac_len 1217 == aes_ctx.ac_mac_len); 1218 ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx, 1219 plaintext, AES_BLOCK_LEN, aes_encrypt_block, 1220 aes_copy_block, aes_xor_block); 1221 ASSERT(aes_ctx.ac_remainder_len == 0); 1222 if ((ret == CRYPTO_SUCCESS) && 1223 (ciphertext != plaintext)) { 1224 plaintext->cd_length = 1225 plaintext->cd_offset - saved_offset; 1226 } else { 1227 plaintext->cd_length = saved_length; 1228 } 1229 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE || 1230 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) { 1231 ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx, 1232 plaintext, AES_BLOCK_LEN, aes_encrypt_block, 1233 aes_xor_block); 1234 ASSERT(aes_ctx.ac_remainder_len == 0); 1235 if ((ret == CRYPTO_SUCCESS) && 1236 (ciphertext != plaintext)) { 1237 plaintext->cd_length = 1238 plaintext->cd_offset - saved_offset; 1239 } else { 1240 plaintext->cd_length = saved_length; 1241 } 1242 } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) { 1243 ASSERT(aes_ctx.ac_remainder_len == 0); 1244 if (ciphertext != plaintext) 1245 plaintext->cd_length = 1246 plaintext->cd_offset - saved_offset; 1247 } else { 1248 if (aes_ctx.ac_remainder_len > 0) { 1249 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx, 
1250 plaintext, aes_encrypt_block); 1251 if (ret == CRYPTO_DATA_LEN_RANGE) 1252 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE; 1253 if (ret != CRYPTO_SUCCESS) 1254 goto out; 1255 } 1256 if (ciphertext != plaintext) 1257 plaintext->cd_length = 1258 plaintext->cd_offset - saved_offset; 1259 } 1260 } else { 1261 plaintext->cd_length = saved_length; 1262 } 1263 plaintext->cd_offset = saved_offset; 1264 1265 out: 1266 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1267 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1268 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len); 1269 } 1270 1271 if (aes_ctx.ac_flags & CCM_MODE) { 1272 if (aes_ctx.ac_pt_buf != NULL) { 1273 kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len); 1274 } 1275 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) { 1276 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) { 1277 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf, 1278 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len); 1279 } 1280 } 1281 1282 return (ret); 1283 } 1284 1285 /* 1286 * KCF software provider context template entry points. 1287 */ 1288 /* ARGSUSED */ 1289 static int 1290 aes_create_ctx_template(crypto_provider_handle_t provider, 1291 crypto_mechanism_t *mechanism, crypto_key_t *key, 1292 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req) 1293 { 1294 void *keysched; 1295 size_t size; 1296 int rv; 1297 1298 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE && 1299 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE && 1300 mechanism->cm_type != AES_CMAC_MECH_INFO_TYPE && 1301 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE && 1302 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE && 1303 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE && 1304 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE) 1305 return (CRYPTO_MECHANISM_INVALID); 1306 1307 if ((keysched = aes_alloc_keysched(&size, 1308 crypto_kmflag(req))) == NULL) { 1309 return (CRYPTO_HOST_MEMORY); 1310 } 1311 1312 /* 1313 * Initialize key schedule. 
Key length information is stored 1314 * in the key. 1315 */ 1316 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) { 1317 bzero(keysched, size); 1318 kmem_free(keysched, size); 1319 return (rv); 1320 } 1321 1322 *tmpl = keysched; 1323 *tmpl_size = size; 1324 1325 return (CRYPTO_SUCCESS); 1326 } 1327 1328 1329 static int 1330 aes_free_context(crypto_ctx_t *ctx) 1331 { 1332 aes_ctx_t *aes_ctx = ctx->cc_provider_private; 1333 1334 if (aes_ctx != NULL) { 1335 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1336 ASSERT(aes_ctx->ac_keysched_len != 0); 1337 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len); 1338 kmem_free(aes_ctx->ac_keysched, 1339 aes_ctx->ac_keysched_len); 1340 } 1341 crypto_free_mode_ctx(aes_ctx); 1342 ctx->cc_provider_private = NULL; 1343 } 1344 1345 return (CRYPTO_SUCCESS); 1346 } 1347 1348 1349 static int 1350 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template, 1351 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag, 1352 boolean_t is_encrypt_init) 1353 { 1354 int rv = CRYPTO_SUCCESS; 1355 void *keysched; 1356 size_t size; 1357 1358 if (template == NULL) { 1359 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL) 1360 return (CRYPTO_HOST_MEMORY); 1361 /* 1362 * Initialize key schedule. 1363 * Key length is stored in the key. 
1364 */ 1365 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) { 1366 kmem_free(keysched, size); 1367 return (rv); 1368 } 1369 1370 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE; 1371 aes_ctx->ac_keysched_len = size; 1372 } else { 1373 keysched = template; 1374 } 1375 aes_ctx->ac_keysched = keysched; 1376 1377 switch (mechanism->cm_type) { 1378 case AES_CBC_MECH_INFO_TYPE: 1379 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param, 1380 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64); 1381 break; 1382 case AES_CMAC_MECH_INFO_TYPE: 1383 rv = cmac_init_ctx((cbc_ctx_t *)aes_ctx, AES_BLOCK_LEN); 1384 break; 1385 case AES_CTR_MECH_INFO_TYPE: { 1386 CK_AES_CTR_PARAMS *pp; 1387 1388 if (mechanism->cm_param == NULL || 1389 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) { 1390 return (CRYPTO_MECHANISM_PARAM_INVALID); 1391 } 1392 pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param; 1393 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits, 1394 pp->cb, aes_copy_block); 1395 break; 1396 } 1397 case AES_CCM_MECH_INFO_TYPE: 1398 if (mechanism->cm_param == NULL || 1399 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) { 1400 return (CRYPTO_MECHANISM_PARAM_INVALID); 1401 } 1402 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param, 1403 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block, 1404 aes_xor_block); 1405 break; 1406 case AES_GCM_MECH_INFO_TYPE: 1407 if (mechanism->cm_param == NULL || 1408 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) { 1409 return (CRYPTO_MECHANISM_PARAM_INVALID); 1410 } 1411 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param, 1412 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 1413 aes_xor_block); 1414 break; 1415 case AES_GMAC_MECH_INFO_TYPE: 1416 if (mechanism->cm_param == NULL || 1417 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) { 1418 return (CRYPTO_MECHANISM_PARAM_INVALID); 1419 } 1420 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param, 1421 
AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block, 1422 aes_xor_block); 1423 break; 1424 case AES_ECB_MECH_INFO_TYPE: 1425 aes_ctx->ac_flags |= ECB_MODE; 1426 } 1427 1428 if (rv != CRYPTO_SUCCESS) { 1429 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) { 1430 bzero(keysched, size); 1431 kmem_free(keysched, size); 1432 } 1433 } 1434 1435 return (rv); 1436 } 1437 1438 static int 1439 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data, 1440 CK_AES_GCM_PARAMS *gcm_params) 1441 { 1442 /* LINTED: pointer alignment */ 1443 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param; 1444 1445 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE) 1446 return (CRYPTO_MECHANISM_INVALID); 1447 1448 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) 1449 return (CRYPTO_MECHANISM_PARAM_INVALID); 1450 1451 if (params->pIv == NULL) 1452 return (CRYPTO_MECHANISM_PARAM_INVALID); 1453 1454 gcm_params->pIv = params->pIv; 1455 gcm_params->ulIvLen = AES_GMAC_IV_LEN; 1456 gcm_params->ulTagBits = AES_GMAC_TAG_BITS; 1457 1458 if (data == NULL) 1459 return (CRYPTO_SUCCESS); 1460 1461 if (data->cd_format != CRYPTO_DATA_RAW) 1462 return (CRYPTO_ARGUMENTS_BAD); 1463 1464 gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base; 1465 gcm_params->ulAADLen = data->cd_length; 1466 return (CRYPTO_SUCCESS); 1467 } 1468 1469 static int 1470 aes_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism, 1471 crypto_key_t *key, crypto_spi_ctx_template_t template, 1472 crypto_req_handle_t req) 1473 { 1474 return (aes_encrypt_init(ctx, mechanism, 1475 key, template, req)); 1476 } 1477 1478 static int 1479 aes_mac(crypto_ctx_t *ctx, crypto_data_t *plaintext, crypto_data_t *ciphertext, 1480 crypto_req_handle_t req) 1481 { 1482 return (aes_encrypt(ctx, plaintext, ciphertext, req)); 1483 } 1484 1485 static int 1486 aes_mac_update(crypto_ctx_t *ctx, crypto_data_t *data, 1487 crypto_req_handle_t req) 1488 { 1489 crypto_data_t out; 1490 uint8_t block[AES_BLOCK_LEN]; 1491 out.cd_format = 
CRYPTO_DATA_RAW; 1492 out.cd_offset = 0; 1493 out.cd_length = sizeof (block); 1494 out.cd_miscdata = NULL; 1495 out.cd_raw.iov_base = (void *)block; 1496 out.cd_raw.iov_len = sizeof (block); 1497 1498 return (aes_encrypt_update(ctx, data, &out, req)); 1499 } 1500 1501 static int 1502 aes_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req) 1503 { 1504 return (aes_encrypt_final(ctx, mac, req)); 1505 } 1506 1507 static int 1508 aes_mac_atomic(crypto_provider_handle_t provider, 1509 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1510 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac, 1511 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1512 { 1513 CK_AES_GCM_PARAMS gcm_params; 1514 crypto_mechanism_t gcm_mech; 1515 int rv; 1516 1517 if (mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) { 1518 if ((rv = process_gmac_mech(mechanism, data, &gcm_params)) 1519 != CRYPTO_SUCCESS) 1520 return (rv); 1521 1522 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE; 1523 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS); 1524 gcm_mech.cm_param = (char *)&gcm_params; 1525 1526 return (aes_encrypt_atomic(provider, session_id, &gcm_mech, 1527 key, &null_crypto_data, mac, template, req)); 1528 } 1529 /* CMAC */ 1530 return (aes_encrypt_atomic(provider, session_id, mechanism, 1531 key, data, mac, template, req)); 1532 } 1533 1534 static int 1535 aes_mac_verify_atomic(crypto_provider_handle_t provider, 1536 crypto_session_id_t session_id, crypto_mechanism_t *mechanism, 1537 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac, 1538 crypto_spi_ctx_template_t template, crypto_req_handle_t req) 1539 { 1540 CK_AES_GCM_PARAMS gcm_params; 1541 crypto_mechanism_t gcm_mech; 1542 crypto_data_t data_mac; 1543 char buf[AES_BLOCK_LEN]; 1544 int rv; 1545 1546 if (mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) { 1547 if ((rv = process_gmac_mech(mechanism, data, &gcm_params)) 1548 != CRYPTO_SUCCESS) 1549 return (rv); 1550 1551 gcm_mech.cm_type = 
AES_GCM_MECH_INFO_TYPE; 1552 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS); 1553 gcm_mech.cm_param = (char *)&gcm_params; 1554 1555 return (aes_decrypt_atomic(provider, session_id, &gcm_mech, 1556 key, mac, &null_crypto_data, template, req)); 1557 } 1558 1559 /* CMAC */ 1560 1561 data_mac.cd_format = CRYPTO_DATA_RAW; 1562 data_mac.cd_offset = 0; 1563 data_mac.cd_length = AES_BLOCK_LEN; 1564 data_mac.cd_miscdata = NULL; 1565 data_mac.cd_raw.iov_base = (void *) buf; 1566 data_mac.cd_raw.iov_len = AES_BLOCK_LEN; 1567 1568 rv = aes_encrypt_atomic(provider, session_id, &gcm_mech, 1569 key, data, &data_mac, template, req); 1570 1571 if (rv != CRYPTO_SUCCESS) 1572 return (rv); 1573 1574 /* should use get_input_data for mac? */ 1575 if (bcmp(buf, mac->cd_raw.iov_base + mac->cd_offset, 1576 AES_BLOCK_LEN) != 0) 1577 return (CRYPTO_INVALID_MAC); 1578 1579 return (CRYPTO_SUCCESS); 1580 } 1581