xref: /illumos-gate/usr/src/uts/common/crypto/io/aes.c (revision 89b2a9fbeabf42fa54594df0e5927bcc50a07cc9)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 /*
27  * AES provider for the Kernel Cryptographic Framework (KCF)
28  */
29 
30 #include <sys/types.h>
31 #include <sys/systm.h>
32 #include <sys/modctl.h>
33 #include <sys/cmn_err.h>
34 #include <sys/ddi.h>
35 #include <sys/crypto/common.h>
36 #include <sys/crypto/impl.h>
37 #include <sys/crypto/spi.h>
38 #include <sys/sysmacros.h>
39 #include <sys/strsun.h>
40 #include <modes/modes.h>
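/*
 * Request the FIPS power-up self-test hooks and the implementation-private
 * definitions from aes_impl.h.
 */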
41 #define	_AES_FIPS_POST
42 #define	_AES_IMPL
43 #include <aes/aes_impl.h>
44 
45 extern struct mod_ops mod_cryptoops;
46 
47 /*
48  * Module linkage information for the kernel.
49  */
50 static struct modlcrypto modlcrypto = {
51 	&mod_cryptoops,
52 	"AES Kernel SW Provider"
53 };
54 
55 static struct modlinkage modlinkage = {
56 	MODREV_1,
57 	(void *)&modlcrypto,
58 	NULL
59 };
60 
61 /*
62  * The following definitions are to keep EXPORT_SRC happy.
63  */
64 #ifndef AES_MIN_KEY_BYTES
65 #define	AES_MIN_KEY_BYTES		0
66 #endif
67 
68 #ifndef AES_MAX_KEY_BYTES
69 #define	AES_MAX_KEY_BYTES		0
70 #endif
71 
72 /*
73  * Table of mechanism info structures passed to KCF during registration.
74  */
75 static crypto_mech_info_t aes_mech_info_tab[] = {
76 	/* AES_ECB */
77 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
78 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
79 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
80 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
81 	/* AES_CBC */
82 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
83 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
84 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
85 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
86 	/* AES_CTR */
87 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
88 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
89 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
90 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
91 	/* AES_CCM */
92 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
93 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
94 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
95 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
96 	/* AES_GCM */
97 	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
98 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
99 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
100 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
101 	/* AES_GMAC */
102 	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
103 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
104 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
105 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
106 	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
107 	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
108 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
109 };
110 
111 /* operations are in-place if the output buffer is NULL */
112 #define	AES_ARG_INPLACE(input, output)				\
113 	if ((output) == NULL)					\
114 		(output) = (input);
115 
116 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
117 
118 static crypto_control_ops_t aes_control_ops = {
119 	aes_provider_status
120 };
121 
122 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
123     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
124 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
125     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
126 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
127     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
128 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t,
129     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
130 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
131     crypto_req_handle_t);
132 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
133     crypto_req_handle_t);
134 
135 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
136     crypto_req_handle_t);
137 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
138     crypto_data_t *, crypto_req_handle_t);
139 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
140     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
141     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
142 
143 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
144     crypto_req_handle_t);
145 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
146     crypto_data_t *, crypto_req_handle_t);
147 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
148     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
149     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
150 
151 static crypto_cipher_ops_t aes_cipher_ops = {
152 	aes_encrypt_init,
153 	aes_encrypt,
154 	aes_encrypt_update,
155 	aes_encrypt_final,
156 	aes_encrypt_atomic,
157 	aes_decrypt_init,
158 	aes_decrypt,
159 	aes_decrypt_update,
160 	aes_decrypt_final,
161 	aes_decrypt_atomic
162 };
163 
164 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
165     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
166     crypto_spi_ctx_template_t, crypto_req_handle_t);
167 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
168     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
169     crypto_spi_ctx_template_t, crypto_req_handle_t);
170 
171 static crypto_mac_ops_t aes_mac_ops = {
172 	NULL,
173 	NULL,
174 	NULL,
175 	NULL,
176 	aes_mac_atomic,
177 	aes_mac_verify_atomic
178 };
179 
180 static int aes_create_ctx_template(crypto_provider_handle_t,
181     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
182     size_t *, crypto_req_handle_t);
183 static int aes_free_context(crypto_ctx_t *);
184 
185 static crypto_ctx_ops_t aes_ctx_ops = {
186 	aes_create_ctx_template,
187 	aes_free_context
188 };
189 
190 static void aes_POST(int *);
191 
192 static crypto_fips140_ops_t aes_fips140_ops = {
193 	aes_POST
194 };
195 
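/*
 * Operations vector registered with KCF.  Only the control, cipher, MAC,
 * context and FIPS 140 op tables are provided; the remaining entries are
 * unused and left NULL.
 */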
196 static crypto_ops_t aes_crypto_ops = {
197 	&aes_control_ops,
198 	NULL,
199 	&aes_cipher_ops,
200 	&aes_mac_ops,
201 	NULL,
202 	NULL,
203 	NULL,
204 	NULL,
205 	NULL,
206 	NULL,
207 	NULL,
208 	NULL,
209 	NULL,
210 	&aes_ctx_ops,
211 	NULL,
212 	NULL,
213 	&aes_fips140_ops
214 };
215 
216 static crypto_provider_info_t aes_prov_info = {
217 	CRYPTO_SPI_VERSION_4,
218 	"AES Software Provider",
219 	CRYPTO_SW_PROVIDER,
220 	{&modlinkage},
221 	NULL,
222 	&aes_crypto_ops,
223 	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
224 	aes_mech_info_tab
225 };
226 
227 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
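/* zero-length RAW data, used as the unused plaintext argument for GMAC */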
228 static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
229 
230 int
231 _init(void)
232 {
233 	int ret;
234 
235 	/*
236 	 * Register with KCF. If the registration fails, return error.
237 	 */
238 	if ((ret = crypto_register_provider(&aes_prov_info,
239 	    &aes_prov_handle)) != CRYPTO_SUCCESS) {
240 		cmn_err(CE_WARN, "%s _init: crypto_register_provider() "
241 		    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
242 		return (EACCES);
243 	}
244 
245 	if ((ret = mod_install(&modlinkage)) != 0) {
246 		int rv;
247 
248 		ASSERT(aes_prov_handle != NULL);
249 		/* Keep retrying while the unregister returns busy. */
250 		while ((rv = crypto_unregister_provider(aes_prov_handle))
251 		    == CRYPTO_BUSY) {
252 			cmn_err(CE_WARN,
253 			    "%s _init: crypto_unregister_provider() "
254 			    "failed (0x%x). Retrying.",
255 			    CRYPTO_PROVIDER_NAME, rv);
256 			/* wait 10 seconds and try again. */
257 			delay(10 * drv_usectohz(1000000));
258 		}
259 	}
260 
261 	return (ret);
262 }
263 
264 int
265 _fini(void)
266 {
267 	int ret;
268 
269 	/*
270 	 * Unregister from KCF if previous registration succeeded.
271 	 */
272 	if (aes_prov_handle != NULL) {
273 		if ((ret = crypto_unregister_provider(aes_prov_handle)) !=
274 		    CRYPTO_SUCCESS) {
275 			cmn_err(CE_WARN,
276 			    "%s _fini: crypto_unregister_provider() "
277 			    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
278 			return (EBUSY);
279 		}
280 		aes_prov_handle = NULL;
281 	}
282 
283 	return (mod_remove(&modlinkage));
284 }
285 
286 int
287 _info(struct modinfo *modinfop)
288 {
289 	return (mod_info(&modlinkage, modinfop));
290 }
291 
292 
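/*
 * Check the mechanism type and parameter length and, if ctx is non-NULL,
 * allocate the matching mode-specific context.
 */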
293 static int
294 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
295 {
296 	void *p = NULL;
297 	boolean_t param_required = B_TRUE;
298 	size_t param_len;
299 	void *(*alloc_fun)(int);
300 	int rv = CRYPTO_SUCCESS;
301 
302 	switch (mechanism->cm_type) {
303 	case AES_ECB_MECH_INFO_TYPE:
304 		param_required = B_FALSE;
305 		alloc_fun = ecb_alloc_ctx;
306 		break;
307 	case AES_CBC_MECH_INFO_TYPE:
308 		param_len = AES_BLOCK_LEN;
309 		alloc_fun = cbc_alloc_ctx;
310 		break;
311 	case AES_CTR_MECH_INFO_TYPE:
312 		param_len = sizeof (CK_AES_CTR_PARAMS);
313 		alloc_fun = ctr_alloc_ctx;
314 		break;
315 	case AES_CCM_MECH_INFO_TYPE:
316 		param_len = sizeof (CK_AES_CCM_PARAMS);
317 		alloc_fun = ccm_alloc_ctx;
318 		break;
319 	case AES_GCM_MECH_INFO_TYPE:
320 		param_len = sizeof (CK_AES_GCM_PARAMS);
321 		alloc_fun = gcm_alloc_ctx;
322 		break;
323 	case AES_GMAC_MECH_INFO_TYPE:
324 		param_len = sizeof (CK_AES_GMAC_PARAMS);
325 		alloc_fun = gmac_alloc_ctx;
326 		break;
327 	default:
328 		return (CRYPTO_MECHANISM_INVALID);
329 	}
330 	if (param_required && mechanism->cm_param != NULL &&
331 	    mechanism->cm_param_len != param_len) {
332 		rv = CRYPTO_MECHANISM_PARAM_INVALID;
333 	}
334 	if (ctx != NULL && rv == CRYPTO_SUCCESS) {
335 		p = (alloc_fun)(kmflag);
		if (p == NULL)
			rv = CRYPTO_HOST_MEMORY;
336 		*ctx = p;
337 	}
338 	return (rv);
339 }
340 
341 /* EXPORT DELETE START */
342 
343 /*
344  * Initialize key schedules for AES
345  */
346 static int
347 init_keysched(crypto_key_t *key, void *newbie)
348 {
349 	/*
350 	 * Only keys by value are supported by this module.
351 	 */
352 	switch (key->ck_format) {
353 	case CRYPTO_KEY_RAW:
354 		if (key->ck_length < AES_MINBITS ||
355 		    key->ck_length > AES_MAXBITS) {
356 			return (CRYPTO_KEY_SIZE_RANGE);
357 		}
358 
359 		/* key length must be either 128, 192, or 256 */
360 		if ((key->ck_length & 63) != 0)
361 			return (CRYPTO_KEY_SIZE_RANGE);
362 		break;
363 	default:
364 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
365 	}
366 
367 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
368 	return (CRYPTO_SUCCESS);
369 }
370 
371 /* EXPORT DELETE END */
372 
373 /*
374  * KCF software provider control entry points.
375  */
376 /* ARGSUSED */
377 static void
378 aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
379 {
380 	*status = CRYPTO_PROVIDER_READY;
381 }
382 
383 static int
384 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
385     crypto_key_t *key, crypto_spi_ctx_template_t template,
386     crypto_req_handle_t req)
{
387 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
388 }
389 
390 static int
391 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
392     crypto_key_t *key, crypto_spi_ctx_template_t template,
393     crypto_req_handle_t req)
{
394 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
395 }
396 
397 
398 
399 /*
400  * KCF software provider encrypt entry points.
401  */
402 static int
403 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
404     crypto_key_t *key, crypto_spi_ctx_template_t template,
405     crypto_req_handle_t req, boolean_t is_encrypt_init)
406 {
407 
408 /* EXPORT DELETE START */
409 
410 	aes_ctx_t *aes_ctx;
411 	int rv;
412 	int kmflag;
413 
414 	/*
415 	 * Only keys by value are supported by this module.
416 	 */
417 	if (key->ck_format != CRYPTO_KEY_RAW) {
418 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
419 	}
420 
421 	kmflag = crypto_kmflag(req);
422 	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
423 	    != CRYPTO_SUCCESS)
424 		return (rv);
425 
426 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
427 	    is_encrypt_init);
428 	if (rv != CRYPTO_SUCCESS) {
429 		crypto_free_mode_ctx(aes_ctx);
430 		return (rv);
431 	}
432 
433 	ctx->cc_provider_private = aes_ctx;
434 
435 /* EXPORT DELETE END */
436 
437 	return (CRYPTO_SUCCESS);
438 }
439 
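/*
 * Copy a 16-byte AES block into two 64-bit words, using aligned 64-bit
 * loads when the source pointer allows it and a byte copy otherwise.
 */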
440 static void
441 aes_copy_block64(uint8_t *in, uint64_t *out)
442 {
443 	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
444 		/* LINTED: pointer alignment */
445 		out[0] = *(uint64_t *)&in[0];
446 		/* LINTED: pointer alignment */
447 		out[1] = *(uint64_t *)&in[8];
448 	} else {
449 		uint8_t *iv8 = (uint8_t *)&out[0];
450 
451 		AES_COPY_BLOCK(in, iv8);
452 	}
453 }
454 
455 
456 static int
457 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
458     crypto_data_t *ciphertext, crypto_req_handle_t req)
459 {
460 	int ret = CRYPTO_FAILED;
461 
462 /* EXPORT DELETE START */
463 
464 	aes_ctx_t *aes_ctx;
465 	size_t saved_length, saved_offset, length_needed;
466 
467 	ASSERT(ctx->cc_provider_private != NULL);
468 	aes_ctx = ctx->cc_provider_private;
469 
470 	/*
471 	 * For block ciphers, plaintext must be a multiple of AES block size.
472 	 * This test is only valid for ciphers whose blocksize is a power of 2.
473 	 */
474 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
475 	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
476 		return (CRYPTO_DATA_LEN_RANGE);
477 
478 	AES_ARG_INPLACE(plaintext, ciphertext);
479 
480 	/*
481 	 * Return the length needed to store the output.
482 	 * Do not destroy the context when the output buffer is too small.
483 	 */
484 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
485 	case CCM_MODE:
486 		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
487 		break;
488 	case GCM_MODE:
489 		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
490 		break;
491 	case GMAC_MODE:
492 		if (plaintext->cd_length != 0)
493 			return (CRYPTO_ARGUMENTS_BAD);
494 
495 		length_needed = aes_ctx->ac_tag_len;
496 		break;
497 	default:
498 		length_needed = plaintext->cd_length;
499 	}
500 
501 	if (ciphertext->cd_length < length_needed) {
502 		ciphertext->cd_length = length_needed;
503 		return (CRYPTO_BUFFER_TOO_SMALL);
504 	}
505 
506 	saved_length = ciphertext->cd_length;
507 	saved_offset = ciphertext->cd_offset;
508 
509 	/*
510 	 * Do an update on the specified input data.
511 	 */
512 	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
513 	if (ret != CRYPTO_SUCCESS) {
514 		return (ret);
515 	}
516 
517 	/*
518 	 * For CCM mode, ccm_encrypt_final() will take care of any
519 	 * left-over unprocessed data and compute the MAC.
520 	 */
521 	if (aes_ctx->ac_flags & CCM_MODE) {
522 		/*
523 		 * ccm_encrypt_final() will compute the MAC and append
524 		 * it to the existing ciphertext, so the remaining length
525 		 * value must be adjusted accordingly.
526 		 */
527 
528 		/* order of following 2 lines MUST not be reversed */
529 		ciphertext->cd_offset = ciphertext->cd_length;
530 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
531 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
532 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
533 		if (ret != CRYPTO_SUCCESS) {
534 			return (ret);
535 		}
536 
537 		if (plaintext != ciphertext) {
538 			ciphertext->cd_length =
539 			    ciphertext->cd_offset - saved_offset;
540 		}
541 		ciphertext->cd_offset = saved_offset;
542 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
543 		/*
544 		 * gcm_encrypt_final() will compute the MAC and append
545 		 * it to the existing ciphertext, so the remaining length
546 		 * value must be adjusted accordingly.
547 		 */
548 
549 		/* order of following 2 lines MUST not be reversed */
550 		ciphertext->cd_offset = ciphertext->cd_length;
551 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
552 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
553 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
554 		    aes_xor_block);
555 		if (ret != CRYPTO_SUCCESS) {
556 			return (ret);
557 		}
558 
559 		if (plaintext != ciphertext) {
560 			ciphertext->cd_length =
561 			    ciphertext->cd_offset - saved_offset;
562 		}
563 		ciphertext->cd_offset = saved_offset;
564 	}
565 
566 	ASSERT(aes_ctx->ac_remainder_len == 0);
567 	(void) aes_free_context(ctx);
568 
569 /* EXPORT DELETE END */
570 
571 	return (ret);
572 }
573 
574 
575 static int
576 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
577     crypto_data_t *plaintext, crypto_req_handle_t req)
578 {
579 	int ret = CRYPTO_FAILED;
580 
581 /* EXPORT DELETE START */
582 
583 	aes_ctx_t *aes_ctx;
584 	off_t saved_offset;
585 	size_t saved_length, length_needed;
586 
587 	ASSERT(ctx->cc_provider_private != NULL);
588 	aes_ctx = ctx->cc_provider_private;
589 
590 	/*
591 	 * For block ciphers, plaintext must be a multiple of AES block size.
592 	 * This test is only valid for ciphers whose blocksize is a power of 2.
593 	 */
594 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
595 	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
596 		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
597 	}
598 
599 	AES_ARG_INPLACE(ciphertext, plaintext);
600 
601 	/*
602 	 * Return length needed to store the output.
603 	 * Do not destroy context when plaintext buffer is too small.
604 	 *
605 	 * CCM:  plaintext is MAC len smaller than cipher text
606 	 * GCM:  plaintext is TAG len smaller than cipher text
607 	 * GMAC: plaintext length must be zero
608 	 */
609 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
610 	case CCM_MODE:
611 		length_needed = aes_ctx->ac_processed_data_len;
612 		break;
613 	case GCM_MODE:
614 		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
615 		break;
616 	case GMAC_MODE:
617 		if (plaintext->cd_length != 0)
618 			return (CRYPTO_ARGUMENTS_BAD);
619 
620 		length_needed = 0;
621 		break;
622 	default:
623 		length_needed = ciphertext->cd_length;
624 	}
625 
626 	if (plaintext->cd_length < length_needed) {
627 		plaintext->cd_length = length_needed;
628 		return (CRYPTO_BUFFER_TOO_SMALL);
629 	}
630 
631 	saved_offset = plaintext->cd_offset;
632 	saved_length = plaintext->cd_length;
633 
634 	/*
635 	 * Do an update on the specified input data.
636 	 */
637 	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
638 	if (ret != CRYPTO_SUCCESS) {
639 		goto cleanup;
640 	}
641 
642 	if (aes_ctx->ac_flags & CCM_MODE) {
643 		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
644 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
645 
646 		/* order of following 2 lines MUST not be reversed */
647 		plaintext->cd_offset = plaintext->cd_length;
648 		plaintext->cd_length = saved_length - plaintext->cd_length;
649 
650 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
651 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
652 		    aes_xor_block);
653 		if (ret == CRYPTO_SUCCESS) {
654 			if (plaintext != ciphertext) {
655 				plaintext->cd_length =
656 				    plaintext->cd_offset - saved_offset;
657 			}
658 		} else {
659 			plaintext->cd_length = saved_length;
660 		}
661 
662 		plaintext->cd_offset = saved_offset;
663 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
664 		/* order of following 2 lines MUST not be reversed */
665 		plaintext->cd_offset = plaintext->cd_length;
666 		plaintext->cd_length = saved_length - plaintext->cd_length;
667 
668 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
669 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
670 		if (ret == CRYPTO_SUCCESS) {
671 			if (plaintext != ciphertext) {
672 				plaintext->cd_length =
673 				    plaintext->cd_offset - saved_offset;
674 			}
675 		} else {
676 			plaintext->cd_length = saved_length;
677 		}
678 
679 		plaintext->cd_offset = saved_offset;
680 	}
681 
682 	ASSERT(aes_ctx->ac_remainder_len == 0);
683 
684 cleanup:
685 	(void) aes_free_context(ctx);
686 
687 /* EXPORT DELETE END */
688 
689 	return (ret);
690 }
691 
692 
693 /* ARGSUSED */
694 static int
695 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
696     crypto_data_t *ciphertext, crypto_req_handle_t req)
697 {
698 	off_t saved_offset;
699 	size_t saved_length, out_len;
700 	int ret = CRYPTO_SUCCESS;
701 	aes_ctx_t *aes_ctx;
702 
703 	ASSERT(ctx->cc_provider_private != NULL);
704 	aes_ctx = ctx->cc_provider_private;
705 
706 	AES_ARG_INPLACE(plaintext, ciphertext);
707 
708 	/* compute number of bytes that will hold the ciphertext */
709 	out_len = aes_ctx->ac_remainder_len;
710 	out_len += plaintext->cd_length;
711 	out_len &= ~(AES_BLOCK_LEN - 1);
712 
713 	/* return length needed to store the output */
714 	if (ciphertext->cd_length < out_len) {
715 		ciphertext->cd_length = out_len;
716 		return (CRYPTO_BUFFER_TOO_SMALL);
717 	}
718 
719 	saved_offset = ciphertext->cd_offset;
720 	saved_length = ciphertext->cd_length;
721 
722 	/*
723 	 * Do the AES update on the specified input data.
724 	 */
725 	switch (plaintext->cd_format) {
726 	case CRYPTO_DATA_RAW:
727 		ret = crypto_update_iov(ctx->cc_provider_private,
728 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
729 		    aes_copy_block64);
730 		break;
731 	case CRYPTO_DATA_UIO:
732 		ret = crypto_update_uio(ctx->cc_provider_private,
733 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
734 		    aes_copy_block64);
735 		break;
736 	case CRYPTO_DATA_MBLK:
737 		ret = crypto_update_mp(ctx->cc_provider_private,
738 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
739 		    aes_copy_block64);
740 		break;
741 	default:
742 		ret = CRYPTO_ARGUMENTS_BAD;
743 	}
744 
745 	/*
746 	 * Since AES counter mode is a stream cipher, we call
747 	 * ctr_mode_final() to pick up any remaining bytes.
748 	 * It is an internal function that does not destroy
749 	 * the context like *normal* final routines.
750 	 */
751 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
752 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
753 		    ciphertext, aes_encrypt_block);
754 	}
755 
756 	if (ret == CRYPTO_SUCCESS) {
757 		if (plaintext != ciphertext)
758 			ciphertext->cd_length =
759 			    ciphertext->cd_offset - saved_offset;
760 	} else {
761 		ciphertext->cd_length = saved_length;
762 	}
763 	ciphertext->cd_offset = saved_offset;
764 
765 	return (ret);
766 }
767 
768 
769 static int
770 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
771     crypto_data_t *plaintext, crypto_req_handle_t req)
772 {
773 	off_t saved_offset;
774 	size_t saved_length, out_len;
775 	int ret = CRYPTO_SUCCESS;
776 	aes_ctx_t *aes_ctx;
777 
778 	ASSERT(ctx->cc_provider_private != NULL);
779 	aes_ctx = ctx->cc_provider_private;
780 
781 	AES_ARG_INPLACE(ciphertext, plaintext);
782 
783 	/*
784 	 * Compute number of bytes that will hold the plaintext.
785 	 * This is not necessary for CCM, GCM, and GMAC since these
786 	 * mechanisms never return plaintext for update operations.
787 	 */
788 	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
789 		out_len = aes_ctx->ac_remainder_len;
790 		out_len += ciphertext->cd_length;
791 		out_len &= ~(AES_BLOCK_LEN - 1);
792 
793 		/* return length needed to store the output */
794 		if (plaintext->cd_length < out_len) {
795 			plaintext->cd_length = out_len;
796 			return (CRYPTO_BUFFER_TOO_SMALL);
797 		}
798 	}
799 
800 	saved_offset = plaintext->cd_offset;
801 	saved_length = plaintext->cd_length;
802 
803 	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
804 		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));
805 
806 	/*
807 	 * Do the AES update on the specified input data.
808 	 */
809 	switch (ciphertext->cd_format) {
810 	case CRYPTO_DATA_RAW:
811 		ret = crypto_update_iov(ctx->cc_provider_private,
812 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
813 		    aes_copy_block64);
814 		break;
815 	case CRYPTO_DATA_UIO:
816 		ret = crypto_update_uio(ctx->cc_provider_private,
817 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
818 		    aes_copy_block64);
819 		break;
820 	case CRYPTO_DATA_MBLK:
821 		ret = crypto_update_mp(ctx->cc_provider_private,
822 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
823 		    aes_copy_block64);
824 		break;
825 	default:
826 		ret = CRYPTO_ARGUMENTS_BAD;
827 	}
828 
829 	/*
830 	 * Since AES counter mode is a stream cipher, we call
831 	 * ctr_mode_final() to pick up any remaining bytes.
832 	 * It is an internal function that does not destroy
833 	 * the context like *normal* final routines.
834 	 */
835 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
836 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
837 		    aes_encrypt_block);
838 		if (ret == CRYPTO_DATA_LEN_RANGE)
839 			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
840 	}
841 
842 	if (ret == CRYPTO_SUCCESS) {
843 		if (ciphertext != plaintext)
844 			plaintext->cd_length =
845 			    plaintext->cd_offset - saved_offset;
846 	} else {
847 		plaintext->cd_length = saved_length;
848 	}
849 	plaintext->cd_offset = saved_offset;
850 
851 
852 	return (ret);
853 }
854 
855 /* ARGSUSED */
856 static int
857 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
858     crypto_req_handle_t req)
859 {
860 
861 /* EXPORT DELETE START */
862 
863 	aes_ctx_t *aes_ctx;
864 	int ret;
865 
866 	ASSERT(ctx->cc_provider_private != NULL);
867 	aes_ctx = ctx->cc_provider_private;
868 
869 	if (data->cd_format != CRYPTO_DATA_RAW &&
870 	    data->cd_format != CRYPTO_DATA_UIO &&
871 	    data->cd_format != CRYPTO_DATA_MBLK) {
872 		return (CRYPTO_ARGUMENTS_BAD);
873 	}
874 
875 	if (aes_ctx->ac_flags & CTR_MODE) {
876 		if (aes_ctx->ac_remainder_len > 0) {
877 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
878 			    aes_encrypt_block);
879 			if (ret != CRYPTO_SUCCESS)
880 				return (ret);
881 		}
882 	} else if (aes_ctx->ac_flags & CCM_MODE) {
883 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
884 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
885 		if (ret != CRYPTO_SUCCESS) {
886 			return (ret);
887 		}
888 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
889 		size_t saved_offset = data->cd_offset;
890 
891 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
892 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
893 		    aes_xor_block);
894 		if (ret != CRYPTO_SUCCESS) {
895 			return (ret);
896 		}
897 		data->cd_length = data->cd_offset - saved_offset;
898 		data->cd_offset = saved_offset;
899 	} else {
900 		/*
901 		 * There must be no unprocessed plaintext; a remainder is
902 		 * left over only if the total data length is not a
903 		 * multiple of the AES block length.
904 		 */
905 		if (aes_ctx->ac_remainder_len > 0) {
906 			return (CRYPTO_DATA_LEN_RANGE);
907 		}
908 		data->cd_length = 0;
909 	}
910 
911 	(void) aes_free_context(ctx);
912 
913 /* EXPORT DELETE END */
914 
915 	return (CRYPTO_SUCCESS);
916 }
917 
918 /* ARGSUSED */
919 static int
920 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
921     crypto_req_handle_t req)
922 {
923 
924 /* EXPORT DELETE START */
925 
926 	aes_ctx_t *aes_ctx;
927 	int ret;
928 	off_t saved_offset;
929 	size_t saved_length;
930 
931 	ASSERT(ctx->cc_provider_private != NULL);
932 	aes_ctx = ctx->cc_provider_private;
933 
934 	if (data->cd_format != CRYPTO_DATA_RAW &&
935 	    data->cd_format != CRYPTO_DATA_UIO &&
936 	    data->cd_format != CRYPTO_DATA_MBLK) {
937 		return (CRYPTO_ARGUMENTS_BAD);
938 	}
939 
940 	/*
941 	 * There must be no unprocessed ciphertext unless CTR mode is in
942 	 * use; a remainder is left over only if the total ciphertext
943 	 * length is not a multiple of the AES block length.
944 	 */
945 	if (aes_ctx->ac_remainder_len > 0) {
946 		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
947 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
948 		else {
949 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
950 			    aes_encrypt_block);
951 			if (ret == CRYPTO_DATA_LEN_RANGE)
952 				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
953 			if (ret != CRYPTO_SUCCESS)
954 				return (ret);
955 		}
956 	}
957 
958 	if (aes_ctx->ac_flags & CCM_MODE) {
959 		/*
960 		 * This is where all the plaintext is returned; make sure
961 		 * the plaintext buffer is big enough.
962 		 */
963 		size_t pt_len = aes_ctx->ac_data_len;
964 		if (data->cd_length < pt_len) {
965 			data->cd_length = pt_len;
966 			return (CRYPTO_BUFFER_TOO_SMALL);
967 		}
968 
969 		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
970 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
971 		saved_offset = data->cd_offset;
972 		saved_length = data->cd_length;
973 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
974 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
975 		    aes_xor_block);
976 		if (ret == CRYPTO_SUCCESS) {
977 			data->cd_length = data->cd_offset - saved_offset;
978 		} else {
979 			data->cd_length = saved_length;
980 		}
981 
982 		data->cd_offset = saved_offset;
983 		if (ret != CRYPTO_SUCCESS) {
984 			return (ret);
985 		}
986 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
987 		/*
988 		 * This is where all the plaintext is returned; make sure
989 		 * the plaintext buffer is big enough.
990 		 */
991 		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
992 		size_t pt_len = gcm_ctx->gcm_processed_data_len -
		    gcm_ctx->gcm_tag_len;
993 
994 		if (data->cd_length < pt_len) {
995 			data->cd_length = pt_len;
996 			return (CRYPTO_BUFFER_TOO_SMALL);
997 		}
998 
999 		saved_offset = data->cd_offset;
1000 		saved_length = data->cd_length;
1001 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
1002 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
1003 		if (ret == CRYPTO_SUCCESS) {
1004 			data->cd_length = data->cd_offset - saved_offset;
1005 		} else {
1006 			data->cd_length = saved_length;
1007 		}
1008 
1009 		data->cd_offset = saved_offset;
1010 		if (ret != CRYPTO_SUCCESS) {
1011 			return (ret);
1012 		}
1013 	}
1014 
1015 
1016 	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
1017 		data->cd_length = 0;
1018 	}
1019 
1020 	(void) aes_free_context(ctx);
1021 
1022 /* EXPORT DELETE END */
1023 
1024 	return (CRYPTO_SUCCESS);
1025 }
1026 
1027 /* ARGSUSED */
1028 static int
1029 aes_encrypt_atomic(crypto_provider_handle_t provider,
1030     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1031     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1032     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1033 {
1034 	aes_ctx_t aes_ctx;	/* on the stack */
1035 	off_t saved_offset;
1036 	size_t saved_length;
1037 	size_t length_needed;
1038 	int ret;
1039 
1040 	AES_ARG_INPLACE(plaintext, ciphertext);
1041 
1042 	/*
1043 	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1044 	 * be a multiple of AES block size.
1045 	 */
1046 	switch (mechanism->cm_type) {
1047 	case AES_CTR_MECH_INFO_TYPE:
1048 	case AES_CCM_MECH_INFO_TYPE:
1049 	case AES_GCM_MECH_INFO_TYPE:
1050 	case AES_GMAC_MECH_INFO_TYPE:
1051 		break;
1052 	default:
1053 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1054 			return (CRYPTO_DATA_LEN_RANGE);
1055 	}
1056 
1057 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1058 		return (ret);
1059 
1060 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1061 
1062 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1063 	    crypto_kmflag(req), B_TRUE);
1064 	if (ret != CRYPTO_SUCCESS)
1065 		return (ret);
1066 
1067 	switch (mechanism->cm_type) {
1068 	case AES_CCM_MECH_INFO_TYPE:
1069 		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1070 		break;
1071 	case AES_GMAC_MECH_INFO_TYPE:
1072 		if (plaintext->cd_length != 0) {
1073 			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
1074 		/* FALLTHRU */
1075 	case AES_GCM_MECH_INFO_TYPE:
1076 		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1077 		break;
1078 	default:
1079 		length_needed = plaintext->cd_length;
1080 	}
1081 
1082 	/* return size of buffer needed to store output */
1083 	if (ciphertext->cd_length < length_needed) {
1084 		ciphertext->cd_length = length_needed;
1085 		ret = CRYPTO_BUFFER_TOO_SMALL;
1086 		goto out;
1087 	}
1088 
1089 	saved_offset = ciphertext->cd_offset;
1090 	saved_length = ciphertext->cd_length;
1091 
1092 	/*
1093 	 * Do an update on the specified input data.
1094 	 */
1095 	switch (plaintext->cd_format) {
1096 	case CRYPTO_DATA_RAW:
1097 		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1098 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1099 		break;
1100 	case CRYPTO_DATA_UIO:
1101 		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1102 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1103 		break;
1104 	case CRYPTO_DATA_MBLK:
1105 		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1106 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1107 		break;
1108 	default:
1109 		ret = CRYPTO_ARGUMENTS_BAD;
1110 	}
1111 
1112 	if (ret == CRYPTO_SUCCESS) {
1113 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1114 			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1115 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1116 			    aes_xor_block);
1117 			if (ret != CRYPTO_SUCCESS)
1118 				goto out;
1119 			ASSERT(aes_ctx.ac_remainder_len == 0);
1120 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1121 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1122 			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1123 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1124 			    aes_copy_block, aes_xor_block);
1125 			if (ret != CRYPTO_SUCCESS)
1126 				goto out;
1127 			ASSERT(aes_ctx.ac_remainder_len == 0);
1128 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1129 			if (aes_ctx.ac_remainder_len > 0) {
1130 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1131 				    ciphertext, aes_encrypt_block);
1132 				if (ret != CRYPTO_SUCCESS)
1133 					goto out;
1134 			}
1135 		} else {
1136 			ASSERT(aes_ctx.ac_remainder_len == 0);
1137 		}
1138 
1139 		if (plaintext != ciphertext) {
1140 			ciphertext->cd_length =
1141 			    ciphertext->cd_offset - saved_offset;
1142 		}
1143 	} else {
1144 		ciphertext->cd_length = saved_length;
1145 	}
1146 	ciphertext->cd_offset = saved_offset;
1147 
1148 out:
1149 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1150 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1151 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1152 	}
1153 
1154 	return (ret);
1155 }
1156 
1157 /* ARGSUSED */
1158 static int
1159 aes_decrypt_atomic(crypto_provider_handle_t provider,
1160     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1161     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1162     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1163 {
1164 	aes_ctx_t aes_ctx;	/* on the stack */
1165 	off_t saved_offset;
1166 	size_t saved_length;
1167 	size_t length_needed;
1168 	int ret;
1169 
1170 	AES_ARG_INPLACE(ciphertext, plaintext);
1171 
1172 	/*
1173 	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1174 	 * be a multiple of AES block size.
1175 	 */
1176 	switch (mechanism->cm_type) {
1177 	case AES_CTR_MECH_INFO_TYPE:
1178 	case AES_CCM_MECH_INFO_TYPE:
1179 	case AES_GCM_MECH_INFO_TYPE:
1180 	case AES_GMAC_MECH_INFO_TYPE:
1181 		break;
1182 	default:
1183 		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1184 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1185 	}
1186 
1187 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1188 		return (ret);
1189 
1190 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1191 
1192 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1193 	    crypto_kmflag(req), B_FALSE);
1194 	if (ret != CRYPTO_SUCCESS)
1195 		return (ret);
1196 
1197 	switch (mechanism->cm_type) {
1198 	case AES_CCM_MECH_INFO_TYPE:
1199 		length_needed = aes_ctx.ac_data_len;
1200 		break;
1201 	case AES_GCM_MECH_INFO_TYPE:
1202 		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1203 		break;
1204 	case AES_GMAC_MECH_INFO_TYPE:
1205 		if (plaintext->cd_length != 0) {
1206 			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
1207 		length_needed = 0;
1208 		break;
1209 	default:
1210 		length_needed = ciphertext->cd_length;
1211 	}
1212 
1213 	/* return size of buffer needed to store output */
1214 	if (plaintext->cd_length < length_needed) {
1215 		plaintext->cd_length = length_needed;
1216 		ret = CRYPTO_BUFFER_TOO_SMALL;
1217 		goto out;
1218 	}
1219 
1220 	saved_offset = plaintext->cd_offset;
1221 	saved_length = plaintext->cd_length;
1222 
1223 	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1224 	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1225 		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1226 
1227 	/*
1228 	 * Do an update on the specified input data.
1229 	 */
1230 	switch (ciphertext->cd_format) {
1231 	case CRYPTO_DATA_RAW:
1232 		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1233 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1234 		break;
1235 	case CRYPTO_DATA_UIO:
1236 		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1237 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1238 		break;
1239 	case CRYPTO_DATA_MBLK:
1240 		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1241 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1242 		break;
1243 	default:
1244 		ret = CRYPTO_ARGUMENTS_BAD;
1245 	}
1246 
1247 	if (ret == CRYPTO_SUCCESS) {
1248 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1249 			ASSERT(aes_ctx.ac_processed_data_len
1250 			    == aes_ctx.ac_data_len);
1251 			ASSERT(aes_ctx.ac_processed_mac_len
1252 			    == aes_ctx.ac_mac_len);
1253 			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1254 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1255 			    aes_copy_block, aes_xor_block);
1256 			ASSERT(aes_ctx.ac_remainder_len == 0);
1257 			if ((ret == CRYPTO_SUCCESS) &&
1258 			    (ciphertext != plaintext)) {
1259 				plaintext->cd_length =
1260 				    plaintext->cd_offset - saved_offset;
1261 			} else {
1262 				plaintext->cd_length = saved_length;
1263 			}
1264 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1265 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1266 			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1267 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1268 			    aes_xor_block);
1269 			ASSERT(aes_ctx.ac_remainder_len == 0);
1270 			if ((ret == CRYPTO_SUCCESS) &&
1271 			    (ciphertext != plaintext)) {
1272 				plaintext->cd_length =
1273 				    plaintext->cd_offset - saved_offset;
1274 			} else {
1275 				plaintext->cd_length = saved_length;
1276 			}
1277 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1278 			ASSERT(aes_ctx.ac_remainder_len == 0);
1279 			if (ciphertext != plaintext)
1280 				plaintext->cd_length =
1281 				    plaintext->cd_offset - saved_offset;
1282 		} else {
1283 			if (aes_ctx.ac_remainder_len > 0) {
1284 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1285 				    plaintext, aes_encrypt_block);
1286 				if (ret == CRYPTO_DATA_LEN_RANGE)
1287 					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1288 				if (ret != CRYPTO_SUCCESS)
1289 					goto out;
1290 			}
1291 			if (ciphertext != plaintext)
1292 				plaintext->cd_length =
1293 				    plaintext->cd_offset - saved_offset;
1294 		}
1295 	} else {
1296 		plaintext->cd_length = saved_length;
1297 	}
1298 	plaintext->cd_offset = saved_offset;
1299 
1300 out:
1301 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1302 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1303 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1304 	}
1305 
1306 	if (aes_ctx.ac_flags & CCM_MODE) {
1307 		if (aes_ctx.ac_pt_buf != NULL) {
1308 			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1309 		}
1310 	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1311 		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1312 			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1313 			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1314 		}
1315 	}
1316 
1317 	return (ret);
1318 }
1319 
1320 /*
1321  * KCF software provider context template entry points.
1322  */
1323 /* ARGSUSED */
1324 static int
1325 aes_create_ctx_template(crypto_provider_handle_t provider,
1326     crypto_mechanism_t *mechanism, crypto_key_t *key,
1327     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1328 {
1329 
1330 /* EXPORT DELETE START */
1331 
1332 	void *keysched;
1333 	size_t size;
1334 	int rv;
1335 
1336 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1337 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1338 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1339 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1340 	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1341 	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1342 		return (CRYPTO_MECHANISM_INVALID);
1343 
1344 	if ((keysched = aes_alloc_keysched(&size,
1345 	    crypto_kmflag(req))) == NULL) {
1346 		return (CRYPTO_HOST_MEMORY);
1347 	}
1348 
1349 	/*
1350 	 * Initialize key schedule.  Key length information is stored
1351 	 * in the key.
1352 	 */
1353 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1354 		bzero(keysched, size);
1355 		kmem_free(keysched, size);
1356 		return (rv);
1357 	}
1358 
1359 	*tmpl = keysched;
1360 	*tmpl_size = size;
1361 
1362 /* EXPORT DELETE END */
1363 
1364 	return (CRYPTO_SUCCESS);
1365 }
1366 
1367 
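/*
 * Zero and free the key schedule if it is owned by the provider, then
 * release the mode-specific context.
 */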
1368 static int
1369 aes_free_context(crypto_ctx_t *ctx)
1370 {
1371 
1372 /* EXPORT DELETE START */
1373 
1374 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1375 
1376 	if (aes_ctx != NULL) {
1377 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1378 			ASSERT(aes_ctx->ac_keysched_len != 0);
1379 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1380 			kmem_free(aes_ctx->ac_keysched,
1381 			    aes_ctx->ac_keysched_len);
1382 		}
1383 		crypto_free_mode_ctx(aes_ctx);
1384 		ctx->cc_provider_private = NULL;
1385 	}
1386 
1387 /* EXPORT DELETE END */
1388 
1389 	return (CRYPTO_SUCCESS);
1390 }
1391 
1392 
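/*
 * Set up the key schedule, either from the caller-supplied template or by
 * expanding the raw key, and perform the mode-specific initialization for
 * the requested mechanism.
 */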
1393 static int
1394 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t template,
1395     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1396     boolean_t is_encrypt_init)
1397 {
1398 	int rv = CRYPTO_SUCCESS;
1399 
1400 /* EXPORT DELETE START */
1401 
1402 	void *keysched;
1403 	size_t size;
1404 
1405 	if (template == NULL) {
1406 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1407 			return (CRYPTO_HOST_MEMORY);
1408 		/*
1409 		 * Initialize key schedule.
1410 		 * Key length is stored in the key.
1411 		 */
1412 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1413 			kmem_free(keysched, size);
1414 			return (rv);
1415 		}
1416 
1417 		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1418 		aes_ctx->ac_keysched_len = size;
1419 	} else {
1420 		keysched = template;
1421 	}
1422 	aes_ctx->ac_keysched = keysched;
1423 
1424 	switch (mechanism->cm_type) {
1425 	case AES_CBC_MECH_INFO_TYPE:
1426 		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1427 		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1428 		break;
1429 	case AES_CTR_MECH_INFO_TYPE: {
1430 		CK_AES_CTR_PARAMS *pp;
1431 
1432 		if (mechanism->cm_param == NULL ||
1433 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1434 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1435 		}
1436 		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1437 		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1438 		    pp->cb, aes_copy_block);
1439 		break;
1440 	}
1441 	case AES_CCM_MECH_INFO_TYPE:
1442 		if (mechanism->cm_param == NULL ||
1443 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1444 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1445 		}
1446 		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1447 		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1448 		    aes_xor_block);
1449 		break;
1450 	case AES_GCM_MECH_INFO_TYPE:
1451 		if (mechanism->cm_param == NULL ||
1452 		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1453 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1454 		}
1455 		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1456 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1457 		    aes_xor_block);
1458 		break;
1459 	case AES_GMAC_MECH_INFO_TYPE:
1460 		if (mechanism->cm_param == NULL ||
1461 		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1462 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1463 		}
1464 		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1465 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1466 		    aes_xor_block);
1467 		break;
1468 	case AES_ECB_MECH_INFO_TYPE:
1469 		aes_ctx->ac_flags |= ECB_MODE;
1470 	}
1471 
1472 	if (rv != CRYPTO_SUCCESS) {
1473 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1474 			bzero(keysched, size);
1475 			kmem_free(keysched, size);
1476 		}
1477 	}
1478 
1479 /* EXPORT DELETE END */
1480 
1481 	return (rv);
1482 }
1483 
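/*
 * Convert a CK_AES_GMAC_PARAMS mechanism parameter (plus optional AAD
 * passed as raw data) into the equivalent CK_AES_GCM_PARAMS so that GMAC
 * requests can be handled by the GCM entry points.
 */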
1484 static int
1485 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1486     CK_AES_GCM_PARAMS *gcm_params)
1487 {
1488 	/* LINTED: pointer alignment */
1489 	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1490 
1491 	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1492 		return (CRYPTO_MECHANISM_INVALID);
1493 
1494 	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1495 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1496 
1497 	if (params->pIv == NULL)
1498 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1499 
1500 	gcm_params->pIv = params->pIv;
1501 	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1502 	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1503 
1504 	if (data == NULL)
1505 		return (CRYPTO_SUCCESS);
1506 
1507 	if (data->cd_format != CRYPTO_DATA_RAW)
1508 		return (CRYPTO_ARGUMENTS_BAD);
1509 
1510 	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1511 	gcm_params->ulAADLen = data->cd_length;
1512 	return (CRYPTO_SUCCESS);
1513 }
1514 
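/*
 * GMAC MAC generation: run a GCM encryption over an empty plaintext with
 * the caller's data supplied as AAD; the resulting tag is the MAC.
 */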
1515 static int
1516 aes_mac_atomic(crypto_provider_handle_t provider,
1517     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1518     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1519     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1520 {
1521 	CK_AES_GCM_PARAMS gcm_params;
1522 	crypto_mechanism_t gcm_mech;
1523 	int rv;
1524 
1525 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1526 	    != CRYPTO_SUCCESS)
1527 		return (rv);
1528 
1529 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1530 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1531 	gcm_mech.cm_param = (char *)&gcm_params;
1532 
1533 	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1534 	    key, &null_crypto_data, mac, template, req));
1535 }
1536 
1537 static int
1538 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1539     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1540     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1541     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1542 {
1543 	CK_AES_GCM_PARAMS gcm_params;
1544 	crypto_mechanism_t gcm_mech;
1545 	int rv;
1546 
1547 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1548 	    != CRYPTO_SUCCESS)
1549 		return (rv);
1550 
1551 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1552 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1553 	gcm_mech.cm_param = (char *)&gcm_params;
1554 
1555 	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1556 	    key, mac, &null_crypto_data, template, req));
1557 }
1558 
1559 /*
1560  * AES Power-Up Self-Test
1561  */
1562 static void
1563 aes_POST(int *rc)
1564 {
1565 
1566 	int ret;
1567 
1568 	/* AES Power-Up Self-Test for 128-bit key. */
1569 	ret = fips_aes_post(FIPS_AES_128_KEY_SIZE);
1570 
1571 	if (ret != CRYPTO_SUCCESS)
1572 		goto out;
1573 
1574 	/* AES Power-Up Self-Test for 192-bit key. */
1575 	ret = fips_aes_post(FIPS_AES_192_KEY_SIZE);
1576 
1577 	if (ret != CRYPTO_SUCCESS)
1578 		goto out;
1579 
1580 	/* AES Power-Up Self-Test for 256-bit key. */
1581 	ret = fips_aes_post(FIPS_AES_256_KEY_SIZE);
1582 
1583 out:
1584 	*rc = ret;
1585 
1586 }
1587