1 /* 2 * CDDL HEADER START 3 * 4 * The contents of this file are subject to the terms of the 5 * Common Development and Distribution License (the "License"). 6 * You may not use this file except in compliance with the License. 7 * 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE 9 * or http://www.opensolaris.org/os/licensing. 10 * See the License for the specific language governing permissions 11 * and limitations under the License. 12 * 13 * When distributing Covered Code, include this CDDL HEADER in each 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE. 15 * If applicable, add the following below this CDDL HEADER, with the 16 * fields enclosed by brackets "[]" replaced with your own identifying 17 * information: Portions Copyright [yyyy] [name of copyright owner] 18 * 19 * CDDL HEADER END 20 */ 21 /* 22 * Copyright 2009 Sun Microsystems, Inc. All rights reserved. 23 * Use is subject to license terms. 24 */ 25 26 #ifndef _KERNEL 27 #include <strings.h> 28 #include <limits.h> 29 #include <assert.h> 30 #include <security/cryptoki.h> 31 #endif 32 33 #include <sys/types.h> 34 #include <sys/kmem.h> 35 #include <modes/modes.h> 36 #include <sys/crypto/common.h> 37 #include <sys/crypto/impl.h> 38 #include <sys/byteorder.h> 39 40 struct aes_block { 41 uint64_t a; 42 uint64_t b; 43 }; 44 45 void 46 gcm_mul(uint64_t *x_in, uint64_t *y, uint64_t *res) 47 { 48 uint64_t R = { 0xe100000000000000ULL }; 49 struct aes_block z = { 0, 0 }; 50 struct aes_block v; 51 uint64_t x; 52 int i, j; 53 54 v.a = ntohll(y[0]); 55 v.b = ntohll(y[1]); 56 57 for (j = 0; j < 2; j++) { 58 x = ntohll(x_in[j]); 59 for (i = 0; i < 64; i++, x <<= 1) { 60 if (x & 0x8000000000000000ULL) { 61 z.a ^= v.a; 62 z.b ^= v.b; 63 } 64 if (v.b & 1ULL) { 65 v.b = (v.a << 63)|(v.b >> 1); 66 v.a = (v.a >> 1) ^ R; 67 } else { 68 v.b = (v.a << 63)|(v.b >> 1); 69 v.a = v.a >> 1; 70 } 71 } 72 } 73 res[0] = htonll(z.a); 74 res[1] = htonll(z.b); 75 } 76 77 #define GHASH(c, d, t) \ 78 xor_block((uint8_t *)(d), (uint8_t *)(c)->gcm_ghash); \ 79 gcm_mul((uint64_t *)(c)->gcm_ghash, (c)->gcm_H, (uint64_t *)(t)); 80 81 /* 82 * Encrypt multiple blocks of data in GCM mode. Decrypt for GCM mode 83 * is done in another function. 84 */ 85 int 86 gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length, 87 crypto_data_t *out, size_t block_size, 88 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 89 void (*copy_block)(uint8_t *, uint8_t *), 90 void (*xor_block)(uint8_t *, uint8_t *)) 91 { 92 size_t remainder = length; 93 size_t need; 94 uint8_t *datap = (uint8_t *)data; 95 uint8_t *blockp; 96 uint8_t *lastp; 97 void *iov_or_mp; 98 offset_t offset; 99 uint8_t *out_data_1; 100 uint8_t *out_data_2; 101 size_t out_data_1_len; 102 uint64_t counter; 103 uint64_t counter_mask = ntohll(0x00000000ffffffffULL); 104 105 if (length + ctx->gcm_remainder_len < block_size) { 106 /* accumulate bytes here and return */ 107 bcopy(datap, 108 (uint8_t *)ctx->gcm_remainder + ctx->gcm_remainder_len, 109 length); 110 ctx->gcm_remainder_len += length; 111 ctx->gcm_copy_to = datap; 112 return (CRYPTO_SUCCESS); 113 } 114 115 lastp = (uint8_t *)ctx->gcm_cb; 116 if (out != NULL) 117 crypto_init_ptrs(out, &iov_or_mp, &offset); 118 119 do { 120 /* Unprocessed data from last call. */ 121 if (ctx->gcm_remainder_len > 0) { 122 need = block_size - ctx->gcm_remainder_len; 123 124 if (need > remainder) 125 return (CRYPTO_DATA_LEN_RANGE); 126 127 bcopy(datap, &((uint8_t *)ctx->gcm_remainder) 128 [ctx->gcm_remainder_len], need); 129 130 blockp = (uint8_t *)ctx->gcm_remainder; 131 } else { 132 blockp = datap; 133 } 134 135 /* 136 * Increment counter. Counter bits are confined 137 * to the bottom 32 bits of the counter block. 138 */ 139 counter = ntohll(ctx->gcm_cb[1] & counter_mask); 140 counter = htonll(counter + 1); 141 counter &= counter_mask; 142 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter; 143 144 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb, 145 (uint8_t *)ctx->gcm_tmp); 146 xor_block(blockp, (uint8_t *)ctx->gcm_tmp); 147 148 lastp = (uint8_t *)ctx->gcm_tmp; 149 150 ctx->gcm_processed_data_len += block_size; 151 152 if (out == NULL) { 153 if (ctx->gcm_remainder_len > 0) { 154 bcopy(blockp, ctx->gcm_copy_to, 155 ctx->gcm_remainder_len); 156 bcopy(blockp + ctx->gcm_remainder_len, datap, 157 need); 158 } 159 } else { 160 crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1, 161 &out_data_1_len, &out_data_2, block_size); 162 163 /* copy block to where it belongs */ 164 if (out_data_1_len == block_size) { 165 copy_block(lastp, out_data_1); 166 } else { 167 bcopy(lastp, out_data_1, out_data_1_len); 168 if (out_data_2 != NULL) { 169 bcopy(lastp + out_data_1_len, 170 out_data_2, 171 block_size - out_data_1_len); 172 } 173 } 174 /* update offset */ 175 out->cd_offset += block_size; 176 } 177 178 /* add ciphertext to the hash */ 179 GHASH(ctx, ctx->gcm_tmp, ctx->gcm_ghash); 180 181 /* Update pointer to next block of data to be processed. */ 182 if (ctx->gcm_remainder_len != 0) { 183 datap += need; 184 ctx->gcm_remainder_len = 0; 185 } else { 186 datap += block_size; 187 } 188 189 remainder = (size_t)&data[length] - (size_t)datap; 190 191 /* Incomplete last block. */ 192 if (remainder > 0 && remainder < block_size) { 193 bcopy(datap, ctx->gcm_remainder, remainder); 194 ctx->gcm_remainder_len = remainder; 195 ctx->gcm_copy_to = datap; 196 goto out; 197 } 198 ctx->gcm_copy_to = NULL; 199 200 } while (remainder > 0); 201 out: 202 return (CRYPTO_SUCCESS); 203 } 204 205 /* ARGSUSED */ 206 int 207 gcm_encrypt_final(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size, 208 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 209 void (*copy_block)(uint8_t *, uint8_t *), 210 void (*xor_block)(uint8_t *, uint8_t *)) 211 { 212 uint64_t counter_mask = ntohll(0x00000000ffffffffULL); 213 uint8_t *ghash, *macp; 214 int i, rv; 215 216 if (out->cd_length < 217 (ctx->gcm_remainder_len + ctx->gcm_tag_len)) { 218 return (CRYPTO_DATA_LEN_RANGE); 219 } 220 221 ghash = (uint8_t *)ctx->gcm_ghash; 222 223 if (ctx->gcm_remainder_len > 0) { 224 uint64_t counter; 225 uint8_t *tmpp = (uint8_t *)ctx->gcm_tmp; 226 227 /* 228 * Here is where we deal with data that is not a 229 * multiple of the block size. 230 */ 231 232 /* 233 * Increment counter. 234 */ 235 counter = ntohll(ctx->gcm_cb[1] & counter_mask); 236 counter = htonll(counter + 1); 237 counter &= counter_mask; 238 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter; 239 240 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb, 241 (uint8_t *)ctx->gcm_tmp); 242 243 macp = (uint8_t *)ctx->gcm_remainder; 244 bzero(macp + ctx->gcm_remainder_len, 245 block_size - ctx->gcm_remainder_len); 246 247 /* XOR with counter block */ 248 for (i = 0; i < ctx->gcm_remainder_len; i++) { 249 macp[i] ^= tmpp[i]; 250 } 251 252 /* add ciphertext to the hash */ 253 GHASH(ctx, macp, ghash); 254 255 ctx->gcm_processed_data_len += ctx->gcm_remainder_len; 256 } 257 258 ctx->gcm_len_a_len_c[1] = htonll(ctx->gcm_processed_data_len << 3); 259 GHASH(ctx, ctx->gcm_len_a_len_c, ghash); 260 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_J0, 261 (uint8_t *)ctx->gcm_J0); 262 xor_block((uint8_t *)ctx->gcm_J0, ghash); 263 264 if (ctx->gcm_remainder_len > 0) { 265 rv = crypto_put_output_data(macp, out, ctx->gcm_remainder_len); 266 if (rv != CRYPTO_SUCCESS) 267 return (rv); 268 } 269 out->cd_offset += ctx->gcm_remainder_len; 270 ctx->gcm_remainder_len = 0; 271 rv = crypto_put_output_data(ghash, out, ctx->gcm_tag_len); 272 if (rv != CRYPTO_SUCCESS) 273 return (rv); 274 out->cd_offset += ctx->gcm_tag_len; 275 276 return (CRYPTO_SUCCESS); 277 } 278 279 /* 280 * This will only deal with decrypting the last block of the input that 281 * might not be a multiple of block length. 282 */ 283 static void 284 gcm_decrypt_incomplete_block(gcm_ctx_t *ctx, size_t block_size, size_t index, 285 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 286 void (*xor_block)(uint8_t *, uint8_t *)) 287 { 288 uint8_t *datap, *outp, *counterp; 289 uint64_t counter; 290 uint64_t counter_mask = ntohll(0x00000000ffffffffULL); 291 int i; 292 293 /* 294 * Increment counter. 295 * Counter bits are confined to the bottom 32 bits 296 */ 297 counter = ntohll(ctx->gcm_cb[1] & counter_mask); 298 counter = htonll(counter + 1); 299 counter &= counter_mask; 300 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter; 301 302 datap = (uint8_t *)ctx->gcm_remainder; 303 outp = &((ctx->gcm_pt_buf)[index]); 304 counterp = (uint8_t *)ctx->gcm_tmp; 305 306 /* authentication tag */ 307 bzero((uint8_t *)ctx->gcm_tmp, block_size); 308 bcopy(datap, (uint8_t *)ctx->gcm_tmp, ctx->gcm_remainder_len); 309 310 /* add ciphertext to the hash */ 311 GHASH(ctx, ctx->gcm_tmp, ctx->gcm_ghash); 312 313 /* decrypt remaining ciphertext */ 314 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb, counterp); 315 316 /* XOR with counter block */ 317 for (i = 0; i < ctx->gcm_remainder_len; i++) { 318 outp[i] = datap[i] ^ counterp[i]; 319 } 320 } 321 322 /* ARGSUSED */ 323 int 324 gcm_mode_decrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length, 325 crypto_data_t *out, size_t block_size, 326 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 327 void (*copy_block)(uint8_t *, uint8_t *), 328 void (*xor_block)(uint8_t *, uint8_t *)) 329 { 330 size_t new_len; 331 uint8_t *new; 332 333 /* 334 * Copy contiguous ciphertext input blocks to plaintext buffer. 335 * Ciphertext will be decrypted in the final. 336 */ 337 if (length > 0) { 338 new_len = ctx->gcm_pt_buf_len + length; 339 #ifdef _KERNEL 340 new = kmem_alloc(new_len, ctx->gcm_kmflag); 341 bcopy(ctx->gcm_pt_buf, new, ctx->gcm_pt_buf_len); 342 kmem_free(ctx->gcm_pt_buf, ctx->gcm_pt_buf_len); 343 #else 344 new = malloc(new_len); 345 bcopy(ctx->gcm_pt_buf, new, ctx->gcm_pt_buf_len); 346 free(ctx->gcm_pt_buf); 347 #endif 348 if (new == NULL) 349 return (CRYPTO_HOST_MEMORY); 350 351 ctx->gcm_pt_buf = new; 352 ctx->gcm_pt_buf_len = new_len; 353 bcopy(data, &ctx->gcm_pt_buf[ctx->gcm_processed_data_len], 354 length); 355 ctx->gcm_processed_data_len += length; 356 } 357 358 ctx->gcm_remainder_len = 0; 359 return (CRYPTO_SUCCESS); 360 } 361 362 int 363 gcm_decrypt_final(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size, 364 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 365 void (*xor_block)(uint8_t *, uint8_t *)) 366 { 367 size_t pt_len; 368 size_t remainder; 369 uint8_t *ghash; 370 uint8_t *blockp; 371 uint8_t *cbp; 372 uint64_t counter; 373 uint64_t counter_mask = ntohll(0x00000000ffffffffULL); 374 int processed = 0, rv; 375 376 ASSERT(ctx->gcm_processed_data_len == ctx->gcm_pt_buf_len); 377 378 pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len; 379 ghash = (uint8_t *)ctx->gcm_ghash; 380 blockp = ctx->gcm_pt_buf; 381 remainder = pt_len; 382 while (remainder > 0) { 383 /* add ciphertext to the hash */ 384 GHASH(ctx, blockp, ghash); 385 386 /* 387 * Increment counter. 388 * Counter bits are confined to the bottom 32 bits 389 */ 390 counter = ntohll(ctx->gcm_cb[1] & counter_mask); 391 counter = htonll(counter + 1); 392 counter &= counter_mask; 393 ctx->gcm_cb[1] = (ctx->gcm_cb[1] & ~counter_mask) | counter; 394 395 cbp = (uint8_t *)ctx->gcm_tmp; 396 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_cb, cbp); 397 398 /* XOR with ciphertext */ 399 xor_block(cbp, blockp); 400 401 processed += block_size; 402 blockp += block_size; 403 remainder -= block_size; 404 405 /* Incomplete last block */ 406 if (remainder > 0 && remainder < block_size) { 407 bcopy(blockp, ctx->gcm_remainder, remainder); 408 ctx->gcm_remainder_len = remainder; 409 /* 410 * not expecting anymore ciphertext, just 411 * compute plaintext for the remaining input 412 */ 413 gcm_decrypt_incomplete_block(ctx, block_size, 414 processed, encrypt_block, xor_block); 415 ctx->gcm_remainder_len = 0; 416 goto out; 417 } 418 } 419 out: 420 ctx->gcm_len_a_len_c[1] = htonll(pt_len << 3); 421 GHASH(ctx, ctx->gcm_len_a_len_c, ghash); 422 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_J0, 423 (uint8_t *)ctx->gcm_J0); 424 xor_block((uint8_t *)ctx->gcm_J0, ghash); 425 426 /* compare the input authentication tag with what we calculated */ 427 if (bcmp(&ctx->gcm_pt_buf[pt_len], ghash, ctx->gcm_tag_len)) { 428 /* They don't match */ 429 return (CRYPTO_INVALID_MAC); 430 } else { 431 rv = crypto_put_output_data(ctx->gcm_pt_buf, out, pt_len); 432 if (rv != CRYPTO_SUCCESS) 433 return (rv); 434 out->cd_offset += pt_len; 435 } 436 return (CRYPTO_SUCCESS); 437 } 438 439 static int 440 gcm_validate_args(CK_AES_GCM_PARAMS *gcm_param) 441 { 442 size_t tag_len; 443 444 /* 445 * Check the length of the authentication tag (in bits). 446 */ 447 tag_len = gcm_param->ulTagBits; 448 switch (tag_len) { 449 case 32: 450 case 64: 451 case 96: 452 case 104: 453 case 112: 454 case 120: 455 case 128: 456 break; 457 default: 458 return (CRYPTO_MECHANISM_PARAM_INVALID); 459 } 460 461 if (gcm_param->ulIvLen == 0) 462 return (CRYPTO_MECHANISM_PARAM_INVALID); 463 464 return (CRYPTO_SUCCESS); 465 } 466 467 static void 468 gcm_format_initial_blocks(uchar_t *iv, ulong_t iv_len, 469 gcm_ctx_t *ctx, size_t block_size, 470 void (*copy_block)(uint8_t *, uint8_t *), 471 void (*xor_block)(uint8_t *, uint8_t *)) 472 { 473 uint8_t *cb; 474 ulong_t remainder = iv_len; 475 ulong_t processed = 0; 476 uint8_t *datap, *ghash; 477 uint64_t len_a_len_c[2]; 478 479 ghash = (uint8_t *)ctx->gcm_ghash; 480 cb = (uint8_t *)ctx->gcm_cb; 481 if (iv_len == 12) { 482 bcopy(iv, cb, 12); 483 cb[12] = 0; 484 cb[13] = 0; 485 cb[14] = 0; 486 cb[15] = 1; 487 /* J0 will be used again in the final */ 488 copy_block(cb, (uint8_t *)ctx->gcm_J0); 489 } else { 490 /* GHASH the IV */ 491 do { 492 if (remainder < block_size) { 493 bzero(cb, block_size); 494 bcopy(&(iv[processed]), cb, remainder); 495 datap = (uint8_t *)cb; 496 remainder = 0; 497 } else { 498 datap = (uint8_t *)(&(iv[processed])); 499 processed += block_size; 500 remainder -= block_size; 501 } 502 GHASH(ctx, datap, ghash); 503 } while (remainder > 0); 504 505 len_a_len_c[0] = 0; 506 len_a_len_c[1] = htonll(iv_len << 3); 507 GHASH(ctx, len_a_len_c, ctx->gcm_J0); 508 509 /* J0 will be used again in the final */ 510 copy_block((uint8_t *)ctx->gcm_J0, (uint8_t *)cb); 511 } 512 } 513 514 /* 515 * The following function is called at encrypt or decrypt init time 516 * for AES GCM mode. 517 */ 518 int 519 gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len, 520 unsigned char *auth_data, size_t auth_data_len, size_t block_size, 521 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 522 void (*copy_block)(uint8_t *, uint8_t *), 523 void (*xor_block)(uint8_t *, uint8_t *)) 524 { 525 uint8_t *ghash, *datap, *authp; 526 size_t remainder, processed; 527 528 /* encrypt zero block to get subkey H */ 529 bzero(ctx->gcm_H, sizeof (ctx->gcm_H)); 530 encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_H, 531 (uint8_t *)ctx->gcm_H); 532 533 gcm_format_initial_blocks(iv, iv_len, ctx, block_size, 534 copy_block, xor_block); 535 536 authp = (uint8_t *)ctx->gcm_tmp; 537 ghash = (uint8_t *)ctx->gcm_ghash; 538 bzero(authp, block_size); 539 bzero(ghash, block_size); 540 541 processed = 0; 542 remainder = auth_data_len; 543 do { 544 if (remainder < block_size) { 545 /* 546 * There's not a block full of data, pad rest of 547 * buffer with zero 548 */ 549 bzero(authp, block_size); 550 bcopy(&(auth_data[processed]), authp, remainder); 551 datap = (uint8_t *)authp; 552 remainder = 0; 553 } else { 554 datap = (uint8_t *)(&(auth_data[processed])); 555 processed += block_size; 556 remainder -= block_size; 557 } 558 559 /* add auth data to the hash */ 560 GHASH(ctx, datap, ghash); 561 562 } while (remainder > 0); 563 564 return (CRYPTO_SUCCESS); 565 } 566 567 int 568 gcm_init_ctx(gcm_ctx_t *gcm_ctx, char *param, size_t block_size, 569 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *), 570 void (*copy_block)(uint8_t *, uint8_t *), 571 void (*xor_block)(uint8_t *, uint8_t *)) 572 { 573 int rv; 574 CK_AES_GCM_PARAMS *gcm_param; 575 576 if (param != NULL) { 577 gcm_param = (CK_AES_GCM_PARAMS *)param; 578 579 if ((rv = gcm_validate_args(gcm_param)) != 0) { 580 return (rv); 581 } 582 583 gcm_ctx->gcm_tag_len = gcm_param->ulTagBits; 584 gcm_ctx->gcm_tag_len >>= 3; 585 gcm_ctx->gcm_processed_data_len = 0; 586 587 /* these values are in bits */ 588 gcm_ctx->gcm_len_a_len_c[0] = htonll(gcm_param->ulAADLen << 3); 589 590 rv = CRYPTO_SUCCESS; 591 gcm_ctx->gcm_flags |= GCM_MODE; 592 } else { 593 rv = CRYPTO_MECHANISM_PARAM_INVALID; 594 goto out; 595 } 596 597 if (gcm_init(gcm_ctx, gcm_param->pIv, gcm_param->ulIvLen, 598 gcm_param->pAAD, gcm_param->ulAADLen, block_size, 599 encrypt_block, copy_block, xor_block) != 0) { 600 rv = CRYPTO_MECHANISM_PARAM_INVALID; 601 } 602 out: 603 return (rv); 604 } 605 606 void * 607 gcm_alloc_ctx(int kmflag) 608 { 609 gcm_ctx_t *gcm_ctx; 610 611 #ifdef _KERNEL 612 if ((gcm_ctx = kmem_zalloc(sizeof (gcm_ctx_t), kmflag)) == NULL) 613 #else 614 if ((gcm_ctx = calloc(1, sizeof (gcm_ctx_t))) == NULL) 615 #endif 616 return (NULL); 617 618 gcm_ctx->gcm_flags = GCM_MODE; 619 return (gcm_ctx); 620 } 621 622 void 623 gcm_set_kmflag(gcm_ctx_t *ctx, int kmflag) 624 { 625 ctx->gcm_kmflag = kmflag; 626 } 627