/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/ddi.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/sysmacros.h>
#include <sys/strsun.h>
#include <modes/modes.h>
#include <aes/aes_impl.h>

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};

/*
 * CSPI information (entry points, provider info, etc.)
 */
typedef enum aes_mech_type {
	AES_ECB_MECH_INFO_TYPE,		/* SUN_CKM_AES_ECB */
	AES_CBC_MECH_INFO_TYPE,		/* SUN_CKM_AES_CBC */
	AES_CBC_PAD_MECH_INFO_TYPE,	/* SUN_CKM_AES_CBC_PAD */
	AES_CTR_MECH_INFO_TYPE,		/* SUN_CKM_AES_CTR */
	AES_CCM_MECH_INFO_TYPE		/* SUN_CKM_AES_CCM */
} aes_mech_type_t;
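
/*
 * Note that AES_CBC_PAD_MECH_INFO_TYPE is defined above, but no
 * SUN_CKM_AES_CBC_PAD entry is registered in aes_mech_info_tab below,
 * so this provider does not offer CBC with padding.
 */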

/*
 * The following definitions are to keep EXPORT_SRC happy.
 */
#ifndef AES_MIN_KEY_BYTES
#define	AES_MIN_KEY_BYTES		0
#endif

#ifndef AES_MAX_KEY_BYTES
#define	AES_MAX_KEY_BYTES		0
#endif

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};

/* operations are in-place if the output buffer is NULL */
#define	AES_ARG_INPLACE(input, output)				\
	if ((output) == NULL)					\
		(output) = (input);

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};

static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops
};

static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_1,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = NULL;

int
_init(void)
{
	int ret;

	/*
	 * Register with KCF. If the registration fails, return error.
	 */
	if ((ret = crypto_register_provider(&aes_prov_info,
	    &aes_prov_handle)) != CRYPTO_SUCCESS) {
		cmn_err(CE_WARN, "%s _init: crypto_register_provider() "
		    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
		return (EACCES);
	}

	if ((ret = mod_install(&modlinkage)) != 0) {
		int rv;

		ASSERT(aes_prov_handle != NULL);
		/* We should not return if the unregister returns busy. */
		while ((rv = crypto_unregister_provider(aes_prov_handle))
		    == CRYPTO_BUSY) {
			cmn_err(CE_WARN,
			    "%s _init: crypto_unregister_provider() "
			    "failed (0x%x). Retrying.",
			    CRYPTO_PROVIDER_NAME, rv);
			/* wait 10 seconds and try again. */
			delay(10 * drv_usectohz(1000000));
		}
	}

	return (ret);
}

int
_fini(void)
{
	int ret;

	/*
	 * Unregister from KCF if previous registration succeeded.
	 */
	if (aes_prov_handle != NULL) {
		if ((ret = crypto_unregister_provider(aes_prov_handle)) !=
		    CRYPTO_SUCCESS) {
			cmn_err(CE_WARN,
			    "%s _fini: crypto_unregister_provider() "
			    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
			return (EBUSY);
		}
		aes_prov_handle = NULL;
	}

	return (mod_remove(&modlinkage));
}

int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}

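/*
 * Validate the mechanism parameter length for the given mechanism type and,
 * if ctx is non-NULL, allocate the matching mode-specific context
 * (ECB, CBC, CTR or CCM).  On an unknown mechanism or a bad parameter
 * length the corresponding CRYPTO_ error code is returned and no context
 * is allocated.
 */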
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		/* no parameter */
		if (ctx != NULL)
			p = ecb_alloc_ctx(kmflag);
		break;
	case AES_CBC_MECH_INFO_TYPE:
		if (mechanism->cm_param != NULL &&
		    mechanism->cm_param_len != AES_BLOCK_LEN) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		if (ctx != NULL)
			p = cbc_alloc_ctx(kmflag);
		break;
	case AES_CTR_MECH_INFO_TYPE:
		if (mechanism->cm_param != NULL &&
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		if (ctx != NULL)
			p = ctr_alloc_ctx(kmflag);
		break;
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param != NULL &&
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		if (ctx != NULL)
			p = ccm_alloc_ctx(kmflag);
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
	}
	if (ctx != NULL)
		*ctx = p;

	return (rv);
}

/* EXPORT DELETE START */

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

/* EXPORT DELETE END */

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}



/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

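/*
 * Copy a 16-byte AES block from 'in' into two 64-bit words at 'out', using
 * 64-bit loads when the source is suitably aligned and falling back to a
 * byte-wise copy otherwise.
 */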
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

/* ARGSUSED */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block cipher modes, the plaintext must be a multiple of the
	 * AES block size.  This test is only valid for ciphers whose
	 * blocksize is a power of 2.  CTR mode is a stream cipher and is
	 * exempt.  Even though AES CCM mode is built on a block cipher, it
	 * does not require the plaintext to be a multiple of the AES block
	 * size; the length requirement for CCM mode has already been
	 * checked at init time.
	 */
	if (((aes_ctx->ac_flags & CTR_MODE) == 0) &&
	    ((aes_ctx->ac_flags & CCM_MODE) == 0) &&
	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * If the output buffer is too small, just return the length needed
	 * to store the output; do not destroy the context in that case.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
	} else {
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append it
		 * to the existing ciphertext, so the left-over length and
		 * offset values must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	/* LINTED */
	return (ret);
}

/* ARGSUSED */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block cipher modes, the ciphertext must be a multiple of the
	 * AES block size.  This test is only valid for ciphers whose
	 * blocksize is a power of 2.  CTR mode is a stream cipher and is
	 * exempt.  Even though AES CCM mode is built on a block cipher, it
	 * does not require the ciphertext to be a multiple of the AES block
	 * size; the length requirement for CCM mode has already been
	 * checked at init time.
	 */
	if (((aes_ctx->ac_flags & CTR_MODE) == 0) &&
	    ((aes_ctx->ac_flags & CCM_MODE) == 0) &&
	    (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * If the output buffer is too small, just return the length needed
	 * to store the output; do not destroy the context in that case.
	 *
	 * For AES CCM mode, the plaintext will be ac_mac_len bytes smaller
	 * than the ciphertext.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		if (plaintext->cd_length < aes_ctx->ac_data_len) {
			plaintext->cd_length = aes_ctx->ac_data_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
		saved_offset = plaintext->cd_offset;
		saved_length = plaintext->cd_length;
	} else if (plaintext->cd_length < ciphertext->cd_length) {
		plaintext->cd_length = ciphertext->cd_length;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	/* LINTED */
	return (ret);
}

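/*
 * Multi-part encrypt entry point.  Full AES blocks are encrypted and any
 * trailing partial block is buffered in the context for a later update or
 * final call; for CTR mode, which is a stream cipher, the remainder is
 * flushed here via ctr_mode_final().
 */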
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;


	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

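/*
 * Multi-part decrypt entry point.  As with aes_encrypt_update(), full blocks
 * are processed and any partial block is carried in the context.  For CCM
 * mode the decrypted data is accumulated internally and is not released
 * until the MAC is verified in the final operation.
 */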
/* ARGSUSED */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/* compute number of bytes that will hold the plaintext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += ciphertext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (plaintext->cd_length < out_len) {
		plaintext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * All of the plaintext is returned here, so make sure the
		 * plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}


	if (((aes_ctx->ac_flags & CTR_MODE) == 0) &&
	    ((aes_ctx->ac_flags & CCM_MODE) == 0)) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

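/*
 * Single-part (atomic) encrypt entry point.  A temporary aes_ctx_t is built
 * on the stack, the entire input is processed in one pass, and any key
 * schedule allocated on behalf of the caller is zeroed and freed before
 * returning, so no state is left behind.
 */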
/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	int ret;

	AES_ARG_INPLACE(plaintext, ciphertext);

	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
		/*
		 * Plaintext must be a multiple of AES block size.
		 * This test only works for non-padded mechanisms
		 * when blocksize is 2^N.
		 */
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	/* return length needed to store the output */
	if (ciphertext->cd_length < plaintext->cd_length) {
		ciphertext->cd_length = plaintext->cd_length;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
		size_t length_needed =
		    plaintext->cd_length + aes_ctx.ac_mac_len;
		if (ciphertext->cd_length < length_needed) {
			ciphertext->cd_length = length_needed;
			ret = CRYPTO_BUFFER_TOO_SMALL;
			goto out;
		}
	}


	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	return (ret);
}

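/*
 * Single-part (atomic) decrypt entry point.  Like aes_encrypt_atomic() this
 * uses a stack context; in addition, for CCM mode the internal plaintext
 * buffer allocated during decryption is freed before returning.
 */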
/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	int ret;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * For block cipher modes, the ciphertext must be a multiple of the
	 * AES block size.  This test is only valid for non-padded
	 * mechanisms whose blocksize is a power of 2.  Even though AES CCM
	 * mode is built on a block cipher, it does not require the
	 * ciphertext to be a multiple of the AES block size; the length
	 * requirement for CCM mode is checked after the context is
	 * initialized below.
	 */
	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE) &&
	    ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0))
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);

	/*
	 * Return the length needed to store the output; the length
	 * requirement for AES CCM mode cannot be determined until later.
	 */
	if ((plaintext->cd_length < ciphertext->cd_length) &&
	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
		plaintext->cd_length = ciphertext->cd_length;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}


	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	/* check length requirement for AES CCM mode now */
	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
		if (plaintext->cd_length < aes_ctx.ac_data_len) {
			plaintext->cd_length = aes_ctx.ac_data_len;
			ret = CRYPTO_BUFFER_TOO_SMALL;
			goto out;
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{

/* EXPORT DELETE START */

	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size,
	    crypto_kmflag(req))) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule.  Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		bzero(keysched, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

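/*
 * Release a per-request context: zero and free the key schedule if this
 * provider allocated it, then free the mode-specific context structure.
 */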
/* ARGSUSED */
static int
aes_free_context(crypto_ctx_t *ctx)
{

/* EXPORT DELETE START */

	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

/* EXPORT DELETE END */

	return (CRYPTO_SUCCESS);
}

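/*
 * Common context setup for both the init and atomic entry points: obtain
 * the key schedule (either from the caller-supplied template or by
 * allocating and expanding one here) and then perform the mode-specific
 * initialization (CBC IV, CTR counter block, CCM parameters or ECB flag).
 */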
/* ARGSUSED */
static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;

/* EXPORT DELETE START */

	void *keysched;
	size_t size;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		pp = (CK_AES_CTR_PARAMS *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

/* EXPORT DELETE END */

	return (rv);
}