/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/ddi.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/sysmacros.h>
#include <sys/strsun.h>
#include <modes/modes.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};

/* operations are in-place if the output buffer is NULL */
#define	AES_ARG_INPLACE(input, output)			\
	if ((output) == NULL)				\
		(output) = (input);

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

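/*
 * Only the atomic MAC entry points are provided, since GMAC is the only
 * MAC mechanism here; the single-part and multi-part entries are unused
 * and left NULL.
 */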
static crypto_mac_ops_t aes_mac_ops = {
	NULL,
	NULL,
	NULL,
	NULL,
	aes_mac_atomic,
	aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};

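/*
 * Operation vectors exported to KCF; op families this provider does not
 * implement are left NULL.
 */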
static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops,
	NULL,
	NULL,
	NULL,
};

static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_4,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
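/*
 * Empty crypto_data_t passed as the plaintext/ciphertext argument when
 * aes_mac_atomic() and aes_mac_verify_atomic() are redirected to the GCM
 * entry points below: GMAC authenticates data but produces no ciphertext.
 */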
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
_init(void)
{
	int ret;

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF. If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}

int
_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != NULL) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = NULL;
	}

	return (mod_remove(&modlinkage));
}

int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}

static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
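	/*
	 * ECB is the only mode that takes no parameter; for the other
	 * modes, a parameter that is present must match the exact size
	 * of that mode's parameter structure.
	 */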
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req) {
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req) {
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

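/*
 * Copy a 16-byte AES block into two 64-bit words, using direct 64-bit
 * loads when the source is suitably aligned and a byte-wise copy
 * (AES_COPY_BLOCK) otherwise.
 */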
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * Compute the length needed to store the output; if the supplied
	 * buffer is too small, report that length to the caller without
	 * destroying the context.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() computes the MAC and appends it to
		 * the existing ciphertext, so adjust the left-over length
		 * value accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() computes the MAC and appends it to
		 * the existing ciphertext, so adjust the left-over length
		 * value accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);
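	/*
	 * out_len is rounded down to a multiple of the block size; any
	 * leftover input bytes remain buffered in the context until the
	 * next update or final call.
	 */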

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

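	/*
	 * GCM/GMAC decryption accumulates the ciphertext internally until
	 * final, so record the memory-allocation flag to use for that
	 * buffering.
	 */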
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		/* FALLTHRU */
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
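	/* Zero the expanded key schedule before freeing it, if we own it. */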
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	return (ret);
}

/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

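	/*
	 * For the authenticated modes, free any internal buffer that was
	 * used to accumulate data during the operation.
	 */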
	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size,
	    crypto_kmflag(req))) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule. Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		bzero(keysched, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}

static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}

static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
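		/*
		 * The caller supplied a pre-expanded key schedule, created
		 * by aes_create_ctx_template(); PROVIDER_OWNS_KEY_SCHEDULE
		 * stays clear so the template is not freed with the context.
		 */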
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}

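/*
 * Translate a GMAC mechanism into the equivalent GCM mechanism parameters:
 * same IV, fixed IV length and tag size, and the caller's data becomes the
 * additional authenticated data (AAD).
 */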
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}

static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
	    key, &null_crypto_data, mac, template, req));
}

static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
	    key, mac, &null_crypto_data, template, req));
}