xref: /titanic_52/usr/src/uts/common/crypto/io/aes.c (revision 8fd04b8338ed5093ec2d1e668fa620b7de44c177)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 /*
27  * AES provider for the Kernel Cryptographic Framework (KCF)
28  */
29 
30 #include <sys/types.h>
31 #include <sys/systm.h>
32 #include <sys/modctl.h>
33 #include <sys/cmn_err.h>
34 #include <sys/ddi.h>
35 #include <sys/crypto/common.h>
36 #include <sys/crypto/impl.h>
37 #include <sys/crypto/spi.h>
38 #include <sys/sysmacros.h>
39 #include <sys/strsun.h>
40 #include <modes/modes.h>
41 #define	_AES_FIPS_POST
42 #define	_AES_IMPL
43 #include <aes/aes_impl.h>
44 
45 extern struct mod_ops mod_cryptoops;
46 
47 /*
48  * Module linkage information for the kernel.
49  */
50 static struct modlcrypto modlcrypto = {
51 	&mod_cryptoops,
52 	"AES Kernel SW Provider"
53 };
54 
55 static struct modlinkage modlinkage = {
56 	MODREV_1,
57 	(void *)&modlcrypto,
58 	NULL
59 };
60 
61 /*
62  * The following definitions are to keep EXPORT_SRC happy.
63  */
64 #ifndef AES_MIN_KEY_BYTES
65 #define	AES_MIN_KEY_BYTES		0
66 #endif
67 
68 #ifndef AES_MAX_KEY_BYTES
69 #define	AES_MAX_KEY_BYTES		0
70 #endif
71 
72 /*
73  * Mechanism info structure passed to KCF during registration.
74  */
75 static crypto_mech_info_t aes_mech_info_tab[] = {
76 	/* AES_ECB */
77 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
78 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
79 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
80 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
81 	/* AES_CBC */
82 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
83 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
84 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
85 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
86 	/* AES_CTR */
87 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
88 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
89 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
90 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
91 	/* AES_CCM */
92 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
93 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
94 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
95 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
96 	/* AES_GCM */
97 	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
98 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
99 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
100 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
101 	/* AES_GMAC */
102 	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
103 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
104 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
105 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
106 	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
107 	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
108 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
109 };
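
/*
 * Illustrative sketch (not part of the provider): how a kernel consumer
 * might reach the CBC mechanism registered above through the KCF consumer
 * API (crypto_mech2id(9F) / crypto_encrypt(9F)).  The guard macro and the
 * helper name are hypothetical, the call is assumed to be synchronous
 * (NULL crypto_call_req_t), and the signatures should be checked against
 * <sys/crypto/api.h>; the block is compiled out by default.
 */
#ifdef	AES_EXAMPLE_CODE
static int
example_aes_cbc_encrypt_oneshot(uint8_t *keybuf, size_t keybytes,
    uint8_t iv[AES_BLOCK_LEN], uint8_t *in, size_t inlen,
    uint8_t *out, size_t outlen)
{
	crypto_mechanism_t mech;
	crypto_key_t key;
	crypto_data_t pt, ct;

	mech.cm_type = crypto_mech2id(SUN_CKM_AES_CBC);
	if (mech.cm_type == CRYPTO_MECH_INVALID)
		return (CRYPTO_MECHANISM_INVALID);
	mech.cm_param = (char *)iv;		/* CBC parameter is the IV */
	mech.cm_param_len = AES_BLOCK_LEN;

	key.ck_format = CRYPTO_KEY_RAW;
	key.ck_data = keybuf;
	key.ck_length = keybytes * 8;		/* ck_length is in bits */

	bzero(&pt, sizeof (pt));
	pt.cd_format = CRYPTO_DATA_RAW;
	pt.cd_length = inlen;			/* multiple of AES_BLOCK_LEN */
	pt.cd_raw.iov_base = (char *)in;
	pt.cd_raw.iov_len = inlen;

	bzero(&ct, sizeof (ct));
	ct.cd_format = CRYPTO_DATA_RAW;
	ct.cd_length = outlen;
	ct.cd_raw.iov_base = (char *)out;
	ct.cd_raw.iov_len = outlen;

	return (crypto_encrypt(&mech, &pt, &key, NULL, &ct, NULL));
}
#endif	/* AES_EXAMPLE_CODE */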
110 
111 /* operations are in-place if the output buffer is NULL */
112 #define	AES_ARG_INPLACE(input, output)				\
113 	if ((output) == NULL)					\
114 		(output) = (input);
115 
116 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
117 
118 static crypto_control_ops_t aes_control_ops = {
119 	aes_provider_status
120 };
121 
122 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
123     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
124 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
125     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
126 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
127     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
128 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
129     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
130 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
131     crypto_req_handle_t);
132 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
133     crypto_req_handle_t);
134 
135 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
136     crypto_req_handle_t);
137 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
138     crypto_data_t *, crypto_req_handle_t);
139 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
140     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
141     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
142 
143 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
144     crypto_req_handle_t);
145 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
146     crypto_data_t *, crypto_req_handle_t);
147 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
148     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
149     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
150 
151 static crypto_cipher_ops_t aes_cipher_ops = {
152 	aes_encrypt_init,
153 	aes_encrypt,
154 	aes_encrypt_update,
155 	aes_encrypt_final,
156 	aes_encrypt_atomic,
157 	aes_decrypt_init,
158 	aes_decrypt,
159 	aes_decrypt_update,
160 	aes_decrypt_final,
161 	aes_decrypt_atomic
162 };
163 
164 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
165     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
166     crypto_spi_ctx_template_t, crypto_req_handle_t);
167 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
168     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
169     crypto_spi_ctx_template_t, crypto_req_handle_t);
170 
171 static crypto_mac_ops_t aes_mac_ops = {
172 	NULL,
173 	NULL,
174 	NULL,
175 	NULL,
176 	aes_mac_atomic,
177 	aes_mac_verify_atomic
178 };
179 
180 static int aes_create_ctx_template(crypto_provider_handle_t,
181     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
182     size_t *, crypto_req_handle_t);
183 static int aes_free_context(crypto_ctx_t *);
184 
185 static crypto_ctx_ops_t aes_ctx_ops = {
186 	aes_create_ctx_template,
187 	aes_free_context
188 };
189 
190 static void aes_POST(int *);
191 
192 static crypto_fips140_ops_t aes_fips140_ops = {
193 	aes_POST
194 };
195 
196 static crypto_ops_t aes_crypto_ops = {
197 	&aes_control_ops,
198 	NULL,
199 	&aes_cipher_ops,
200 	&aes_mac_ops,
201 	NULL,
202 	NULL,
203 	NULL,
204 	NULL,
205 	NULL,
206 	NULL,
207 	NULL,
208 	NULL,
209 	NULL,
210 	&aes_ctx_ops,
211 	NULL,
212 	NULL,
213 	&aes_fips140_ops
214 };
215 
216 static crypto_provider_info_t aes_prov_info = {
217 	CRYPTO_SPI_VERSION_4,
218 	"AES Software Provider",
219 	CRYPTO_SW_PROVIDER,
220 	{&modlinkage},
221 	NULL,
222 	&aes_crypto_ops,
223 	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
224 	aes_mech_info_tab
225 };
226 
227 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
228 static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
229 
230 int
231 _init(void)
232 {
233 	int ret;
234 
235 	if ((ret = mod_install(&modlinkage)) != 0)
236 		return (ret);
237 
238 	/* Register with KCF.  If the registration fails, remove the module. */
239 	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
240 		(void) mod_remove(&modlinkage);
241 		return (EACCES);
242 	}
243 
244 	return (0);
245 }
246 
247 int
248 _fini(void)
249 {
250 	/* Unregister from KCF if module is registered */
251 	if (aes_prov_handle != NULL) {
252 		if (crypto_unregister_provider(aes_prov_handle))
253 			return (EBUSY);
254 
255 		aes_prov_handle = NULL;
256 	}
257 
258 	return (mod_remove(&modlinkage));
259 }
260 
261 int
262 _info(struct modinfo *modinfop)
263 {
264 	return (mod_info(&modlinkage, modinfop));
265 }
266 
267 
268 static int
269 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
270 {
271 	void *p = NULL;
272 	boolean_t param_required = B_TRUE;
273 	size_t param_len;
274 	void *(*alloc_fun)(int);
275 	int rv = CRYPTO_SUCCESS;
276 
277 	switch (mechanism->cm_type) {
278 	case AES_ECB_MECH_INFO_TYPE:
279 		param_required = B_FALSE;
280 		alloc_fun = ecb_alloc_ctx;
281 		break;
282 	case AES_CBC_MECH_INFO_TYPE:
283 		param_len = AES_BLOCK_LEN;
284 		alloc_fun = cbc_alloc_ctx;
285 		break;
286 	case AES_CTR_MECH_INFO_TYPE:
287 		param_len = sizeof (CK_AES_CTR_PARAMS);
288 		alloc_fun = ctr_alloc_ctx;
289 		break;
290 	case AES_CCM_MECH_INFO_TYPE:
291 		param_len = sizeof (CK_AES_CCM_PARAMS);
292 		alloc_fun = ccm_alloc_ctx;
293 		break;
294 	case AES_GCM_MECH_INFO_TYPE:
295 		param_len = sizeof (CK_AES_GCM_PARAMS);
296 		alloc_fun = gcm_alloc_ctx;
297 		break;
298 	case AES_GMAC_MECH_INFO_TYPE:
299 		param_len = sizeof (CK_AES_GMAC_PARAMS);
300 		alloc_fun = gmac_alloc_ctx;
301 		break;
302 	default:
303 		return (CRYPTO_MECHANISM_INVALID);
304 	}
305 	if (param_required && mechanism->cm_param != NULL &&
306 	    mechanism->cm_param_len != param_len) {
307 		rv = CRYPTO_MECHANISM_PARAM_INVALID;
308 	}
309 	if (ctx != NULL) {
310 		p = (alloc_fun)(kmflag);
311 		*ctx = p;
312 	}
313 	return (rv);
314 }
315 
316 /* EXPORT DELETE START */
317 
318 /*
319  * Initialize key schedules for AES
320  */
321 static int
322 init_keysched(crypto_key_t *key, void *newbie)
323 {
324 	/*
325 	 * Only keys by value are supported by this module.
326 	 */
327 	switch (key->ck_format) {
328 	case CRYPTO_KEY_RAW:
329 		if (key->ck_length < AES_MINBITS ||
330 		    key->ck_length > AES_MAXBITS) {
331 			return (CRYPTO_KEY_SIZE_RANGE);
332 		}
333 
334 		/* key length (in bits) must be either 128, 192, or 256 */
335 		if ((key->ck_length & 63) != 0)
336 			return (CRYPTO_KEY_SIZE_RANGE);
337 		break;
338 	default:
339 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
340 	}
341 
342 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
343 	return (CRYPTO_SUCCESS);
344 }
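
/*
 * Note: ck_length above is expressed in bits, not bytes.  A caller
 * supplying a 256-bit raw key would fill in the crypto_key_t roughly as
 * follows (sketch only; "keybuf" is a hypothetical 32-byte buffer):
 *
 *	key.ck_format = CRYPTO_KEY_RAW;
 *	key.ck_data   = keybuf;
 *	key.ck_length = 32 * 8;
 *
 * Together with the AES_MINBITS/AES_MAXBITS range check, the
 * multiple-of-64 test above accepts exactly 128, 192, and 256 bits.
 */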
345 
346 /* EXPORT DELETE END */
347 
348 /*
349  * KCF software provider control entry points.
350  */
351 /* ARGSUSED */
352 static void
353 aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
354 {
355 	*status = CRYPTO_PROVIDER_READY;
356 }
357 
358 static int
359 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
360     crypto_key_t *key, crypto_spi_ctx_template_t template,
361     crypto_req_handle_t req) {
362 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
363 }
364 
365 static int
366 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
367     crypto_key_t *key, crypto_spi_ctx_template_t template,
368     crypto_req_handle_t req) {
369 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
370 }
371 
372 
373 
374 /*
375  * KCF software provider encrypt entry points.
376  */
377 static int
378 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
379     crypto_key_t *key, crypto_spi_ctx_template_t template,
380     crypto_req_handle_t req, boolean_t is_encrypt_init)
381 {
382 
383 /* EXPORT DELETE START */
384 
385 	aes_ctx_t *aes_ctx;
386 	int rv;
387 	int kmflag;
388 
389 	/*
390 	 * Only keys by value are supported by this module.
391 	 */
392 	if (key->ck_format != CRYPTO_KEY_RAW) {
393 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
394 	}
395 
396 	kmflag = crypto_kmflag(req);
397 	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
398 	    != CRYPTO_SUCCESS)
399 		return (rv);
400 
401 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
402 	    is_encrypt_init);
403 	if (rv != CRYPTO_SUCCESS) {
404 		crypto_free_mode_ctx(aes_ctx);
405 		return (rv);
406 	}
407 
408 	ctx->cc_provider_private = aes_ctx;
409 
410 /* EXPORT DELETE END */
411 
412 	return (CRYPTO_SUCCESS);
413 }
414 
415 static void
416 aes_copy_block64(uint8_t *in, uint64_t *out)
417 {
418 	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
419 		/* LINTED: pointer alignment */
420 		out[0] = *(uint64_t *)&in[0];
421 		/* LINTED: pointer alignment */
422 		out[1] = *(uint64_t *)&in[8];
423 	} else {
424 		uint8_t *iv8 = (uint8_t *)&out[0];
425 
426 		AES_COPY_BLOCK(in, iv8);
427 	}
428 }
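
/*
 * Note: aes_copy_block64() is the 64-bit copy callback handed to the mode
 * framework; the destination is typically the 64-bit aligned IV/counter
 * words inside the mode context, while the source may be an arbitrarily
 * aligned caller buffer.  When the source is 8-byte aligned, two direct
 * 64-bit loads are used; otherwise AES_COPY_BLOCK falls back to a
 * byte-by-byte copy.
 */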
429 
430 
431 static int
432 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
433     crypto_data_t *ciphertext, crypto_req_handle_t req)
434 {
435 	int ret = CRYPTO_FAILED;
436 
437 /* EXPORT DELETE START */
438 
439 	aes_ctx_t *aes_ctx;
440 	size_t saved_length, saved_offset, length_needed;
441 
442 	ASSERT(ctx->cc_provider_private != NULL);
443 	aes_ctx = ctx->cc_provider_private;
444 
445 	/*
446 	 * For block ciphers, plaintext must be a multiple of AES block size.
447 	 * This test is only valid for ciphers whose blocksize is a power of 2.
448 	 */
449 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
450 	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
451 		return (CRYPTO_DATA_LEN_RANGE);
452 
453 	AES_ARG_INPLACE(plaintext, ciphertext);
454 
455 	/*
456 	 * If the output buffer is too small, just return the length needed
457 	 * to store the output; do not destroy the context in that case.
458 	 */
459 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
460 	case CCM_MODE:
461 		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
462 		break;
463 	case GCM_MODE:
464 		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
465 		break;
466 	case GMAC_MODE:
467 		if (plaintext->cd_length != 0)
468 			return (CRYPTO_ARGUMENTS_BAD);
469 
470 		length_needed = aes_ctx->ac_tag_len;
471 		break;
472 	default:
473 		length_needed = plaintext->cd_length;
474 	}
475 
476 	if (ciphertext->cd_length < length_needed) {
477 		ciphertext->cd_length = length_needed;
478 		return (CRYPTO_BUFFER_TOO_SMALL);
479 	}
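
	/*
	 * Note: this is the standard KCF sizing convention.  A caller may
	 * pass an output buffer that is too small (even zero-length), get
	 * CRYPTO_BUFFER_TOO_SMALL back with cd_length rewritten to the
	 * required size, and then retry with a larger buffer; per the
	 * comment above, the context is preserved for that retry.
	 */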
480 
481 	saved_length = ciphertext->cd_length;
482 	saved_offset = ciphertext->cd_offset;
483 
484 	/*
485 	 * Do an update on the specified input data.
486 	 */
487 	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
488 	if (ret != CRYPTO_SUCCESS) {
489 		return (ret);
490 	}
491 
492 	/*
493 	 * For CCM mode, ccm_encrypt_final() takes care of any leftover
494 	 * unprocessed data and computes the MAC.
495 	 */
496 	if (aes_ctx->ac_flags & CCM_MODE) {
497 		/*
498 		 * ccm_encrypt_final() computes the MAC and appends it to the
499 		 * existing ciphertext, so the remaining length and offset
500 		 * must be adjusted accordingly.
501 		 */
502 
503 		/* order of following 2 lines MUST not be reversed */
504 		ciphertext->cd_offset = ciphertext->cd_length;
505 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
506 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
507 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
508 		if (ret != CRYPTO_SUCCESS) {
509 			return (ret);
510 		}
511 
512 		if (plaintext != ciphertext) {
513 			ciphertext->cd_length =
514 			    ciphertext->cd_offset - saved_offset;
515 		}
516 		ciphertext->cd_offset = saved_offset;
517 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
518 		/*
519 		 * gcm_encrypt_final() computes the MAC and appends it to the
520 		 * existing ciphertext, so the remaining length and offset
521 		 * must be adjusted accordingly.
522 		 */
523 
524 		/* order of following 2 lines MUST not be reversed */
525 		ciphertext->cd_offset = ciphertext->cd_length;
526 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
527 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
528 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
529 		    aes_xor_block);
530 		if (ret != CRYPTO_SUCCESS) {
531 			return (ret);
532 		}
533 
534 		if (plaintext != ciphertext) {
535 			ciphertext->cd_length =
536 			    ciphertext->cd_offset - saved_offset;
537 		}
538 		ciphertext->cd_offset = saved_offset;
539 	}
540 
541 	ASSERT(aes_ctx->ac_remainder_len == 0);
542 	(void) aes_free_context(ctx);
543 
544 /* EXPORT DELETE END */
545 
546 	return (ret);
547 }
548 
549 
550 static int
551 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
552     crypto_data_t *plaintext, crypto_req_handle_t req)
553 {
554 	int ret = CRYPTO_FAILED;
555 
556 /* EXPORT DELETE START */
557 
558 	aes_ctx_t *aes_ctx;
559 	off_t saved_offset;
560 	size_t saved_length, length_needed;
561 
562 	ASSERT(ctx->cc_provider_private != NULL);
563 	aes_ctx = ctx->cc_provider_private;
564 
565 	/*
566 	 * For block ciphers, ciphertext must be a multiple of AES block size.
567 	 * This test is only valid for ciphers whose blocksize is a power of 2.
568 	 */
569 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
570 	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
571 		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
572 	}
573 
574 	AES_ARG_INPLACE(ciphertext, plaintext);
575 
576 	/*
577 	 * Return length needed to store the output.
578 	 * Do not destroy context when plaintext buffer is too small.
579 	 *
580 	 * CCM:  plaintext is MAC len smaller than cipher text
581 	 * GCM:  plaintext is TAG len smaller than cipher text
582 	 * GMAC: plaintext length must be zero
583 	 */
584 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
585 	case CCM_MODE:
586 		length_needed = aes_ctx->ac_processed_data_len;
587 		break;
588 	case GCM_MODE:
589 		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
590 		break;
591 	case GMAC_MODE:
592 		if (plaintext->cd_length != 0)
593 			return (CRYPTO_ARGUMENTS_BAD);
594 
595 		length_needed = 0;
596 		break;
597 	default:
598 		length_needed = ciphertext->cd_length;
599 	}
600 
601 	if (plaintext->cd_length < length_needed) {
602 		plaintext->cd_length = length_needed;
603 		return (CRYPTO_BUFFER_TOO_SMALL);
604 	}
605 
606 	saved_offset = plaintext->cd_offset;
607 	saved_length = plaintext->cd_length;
608 
609 	/*
610 	 * Do an update on the specified input data.
611 	 */
612 	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
613 	if (ret != CRYPTO_SUCCESS) {
614 		goto cleanup;
615 	}
616 
617 	if (aes_ctx->ac_flags & CCM_MODE) {
618 		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
619 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
620 
621 		/* order of following 2 lines MUST not be reversed */
622 		plaintext->cd_offset = plaintext->cd_length;
623 		plaintext->cd_length = saved_length - plaintext->cd_length;
624 
625 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
626 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
627 		    aes_xor_block);
628 		if (ret == CRYPTO_SUCCESS) {
629 			if (plaintext != ciphertext) {
630 				plaintext->cd_length =
631 				    plaintext->cd_offset - saved_offset;
632 			}
633 		} else {
634 			plaintext->cd_length = saved_length;
635 		}
636 
637 		plaintext->cd_offset = saved_offset;
638 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
639 		/* order of following 2 lines MUST not be reversed */
640 		plaintext->cd_offset = plaintext->cd_length;
641 		plaintext->cd_length = saved_length - plaintext->cd_length;
642 
643 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
644 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
645 		if (ret == CRYPTO_SUCCESS) {
646 			if (plaintext != ciphertext) {
647 				plaintext->cd_length =
648 				    plaintext->cd_offset - saved_offset;
649 			}
650 		} else {
651 			plaintext->cd_length = saved_length;
652 		}
653 
654 		plaintext->cd_offset = saved_offset;
655 	}
656 
657 	ASSERT(aes_ctx->ac_remainder_len == 0);
658 
659 cleanup:
660 	(void) aes_free_context(ctx);
661 
662 /* EXPORT DELETE END */
663 
664 	return (ret);
665 }
666 
667 
668 /* ARGSUSED */
669 static int
670 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
671     crypto_data_t *ciphertext, crypto_req_handle_t req)
672 {
673 	off_t saved_offset;
674 	size_t saved_length, out_len;
675 	int ret = CRYPTO_SUCCESS;
676 	aes_ctx_t *aes_ctx;
677 
678 	ASSERT(ctx->cc_provider_private != NULL);
679 	aes_ctx = ctx->cc_provider_private;
680 
681 	AES_ARG_INPLACE(plaintext, ciphertext);
682 
683 	/* compute number of bytes that will hold the ciphertext */
684 	out_len = aes_ctx->ac_remainder_len;
685 	out_len += plaintext->cd_length;
686 	out_len &= ~(AES_BLOCK_LEN - 1);
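	/*
	 * Example: with AES_BLOCK_LEN == 16, 9 bytes already buffered in the
	 * context plus 20 new bytes of plaintext give out_len = 29 & ~15 =
	 * 16; one block is produced now and the remaining 13 bytes stay
	 * buffered (tracked by ac_remainder_len) for a later update or the
	 * final call.
	 */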
687 
688 	/* return length needed to store the output */
689 	if (ciphertext->cd_length < out_len) {
690 		ciphertext->cd_length = out_len;
691 		return (CRYPTO_BUFFER_TOO_SMALL);
692 	}
693 
694 	saved_offset = ciphertext->cd_offset;
695 	saved_length = ciphertext->cd_length;
696 
697 	/*
698 	 * Do the AES update on the specified input data.
699 	 */
700 	switch (plaintext->cd_format) {
701 	case CRYPTO_DATA_RAW:
702 		ret = crypto_update_iov(ctx->cc_provider_private,
703 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
704 		    aes_copy_block64);
705 		break;
706 	case CRYPTO_DATA_UIO:
707 		ret = crypto_update_uio(ctx->cc_provider_private,
708 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
709 		    aes_copy_block64);
710 		break;
711 	case CRYPTO_DATA_MBLK:
712 		ret = crypto_update_mp(ctx->cc_provider_private,
713 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
714 		    aes_copy_block64);
715 		break;
716 	default:
717 		ret = CRYPTO_ARGUMENTS_BAD;
718 	}
719 
720 	/*
721 	 * Since AES counter mode is a stream cipher, we call
722 	 * ctr_mode_final() to pick up any remaining bytes.
723 	 * It is an internal function that does not destroy
724 	 * the context like *normal* final routines.
725 	 */
726 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
727 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
728 		    ciphertext, aes_encrypt_block);
729 	}
730 
731 	if (ret == CRYPTO_SUCCESS) {
732 		if (plaintext != ciphertext)
733 			ciphertext->cd_length =
734 			    ciphertext->cd_offset - saved_offset;
735 	} else {
736 		ciphertext->cd_length = saved_length;
737 	}
738 	ciphertext->cd_offset = saved_offset;
739 
740 	return (ret);
741 }
742 
743 
744 static int
745 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
746     crypto_data_t *plaintext, crypto_req_handle_t req)
747 {
748 	off_t saved_offset;
749 	size_t saved_length, out_len;
750 	int ret = CRYPTO_SUCCESS;
751 	aes_ctx_t *aes_ctx;
752 
753 	ASSERT(ctx->cc_provider_private != NULL);
754 	aes_ctx = ctx->cc_provider_private;
755 
756 	AES_ARG_INPLACE(ciphertext, plaintext);
757 
758 	/*
759 	 * Compute number of bytes that will hold the plaintext.
760 	 * This is not necessary for CCM, GCM, and GMAC since these
761 	 * mechanisms never return plaintext for update operations.
762 	 */
763 	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
764 		out_len = aes_ctx->ac_remainder_len;
765 		out_len += ciphertext->cd_length;
766 		out_len &= ~(AES_BLOCK_LEN - 1);
767 
768 		/* return length needed to store the output */
769 		if (plaintext->cd_length < out_len) {
770 			plaintext->cd_length = out_len;
771 			return (CRYPTO_BUFFER_TOO_SMALL);
772 		}
773 	}
774 
775 	saved_offset = plaintext->cd_offset;
776 	saved_length = plaintext->cd_length;
777 
778 	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
779 		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));
780 
781 	/*
782 	 * Do the AES update on the specified input data.
783 	 */
784 	switch (ciphertext->cd_format) {
785 	case CRYPTO_DATA_RAW:
786 		ret = crypto_update_iov(ctx->cc_provider_private,
787 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
788 		    aes_copy_block64);
789 		break;
790 	case CRYPTO_DATA_UIO:
791 		ret = crypto_update_uio(ctx->cc_provider_private,
792 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
793 		    aes_copy_block64);
794 		break;
795 	case CRYPTO_DATA_MBLK:
796 		ret = crypto_update_mp(ctx->cc_provider_private,
797 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
798 		    aes_copy_block64);
799 		break;
800 	default:
801 		ret = CRYPTO_ARGUMENTS_BAD;
802 	}
803 
804 	/*
805 	 * Since AES counter mode is a stream cipher, we call
806 	 * ctr_mode_final() to pick up any remaining bytes.
807 	 * It is an internal function that does not destroy
808 	 * the context like *normal* final routines.
809 	 */
810 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
811 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
812 		    aes_encrypt_block);
813 		if (ret == CRYPTO_DATA_LEN_RANGE)
814 			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
815 	}
816 
817 	if (ret == CRYPTO_SUCCESS) {
818 		if (ciphertext != plaintext)
819 			plaintext->cd_length =
820 			    plaintext->cd_offset - saved_offset;
821 	} else {
822 		plaintext->cd_length = saved_length;
823 	}
824 	plaintext->cd_offset = saved_offset;
825 
826 
827 	return (ret);
828 }
829 
830 /* ARGSUSED */
831 static int
832 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
833     crypto_req_handle_t req)
834 {
835 
836 /* EXPORT DELETE START */
837 
838 	aes_ctx_t *aes_ctx;
839 	int ret;
840 
841 	ASSERT(ctx->cc_provider_private != NULL);
842 	aes_ctx = ctx->cc_provider_private;
843 
844 	if (data->cd_format != CRYPTO_DATA_RAW &&
845 	    data->cd_format != CRYPTO_DATA_UIO &&
846 	    data->cd_format != CRYPTO_DATA_MBLK) {
847 		return (CRYPTO_ARGUMENTS_BAD);
848 	}
849 
850 	if (aes_ctx->ac_flags & CTR_MODE) {
851 		if (aes_ctx->ac_remainder_len > 0) {
852 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
853 			    aes_encrypt_block);
854 			if (ret != CRYPTO_SUCCESS)
855 				return (ret);
856 		}
857 	} else if (aes_ctx->ac_flags & CCM_MODE) {
858 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
859 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
860 		if (ret != CRYPTO_SUCCESS) {
861 			return (ret);
862 		}
863 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
864 		size_t saved_offset = data->cd_offset;
865 
866 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
867 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
868 		    aes_xor_block);
869 		if (ret != CRYPTO_SUCCESS) {
870 			return (ret);
871 		}
872 		data->cd_length = data->cd_offset - saved_offset;
873 		data->cd_offset = saved_offset;
874 	} else {
875 		/*
876 		 * There must be no unprocessed plaintext; data is left over
877 		 * only if the total input length was not a multiple of the
878 		 * AES block length.
879 		 */
880 		if (aes_ctx->ac_remainder_len > 0) {
881 			return (CRYPTO_DATA_LEN_RANGE);
882 		}
883 		data->cd_length = 0;
884 	}
885 
886 	(void) aes_free_context(ctx);
887 
888 /* EXPORT DELETE END */
889 
890 	return (CRYPTO_SUCCESS);
891 }
892 
893 /* ARGSUSED */
894 static int
895 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
896     crypto_req_handle_t req)
897 {
898 
899 /* EXPORT DELETE START */
900 
901 	aes_ctx_t *aes_ctx;
902 	int ret;
903 	off_t saved_offset;
904 	size_t saved_length;
905 
906 	ASSERT(ctx->cc_provider_private != NULL);
907 	aes_ctx = ctx->cc_provider_private;
908 
909 	if (data->cd_format != CRYPTO_DATA_RAW &&
910 	    data->cd_format != CRYPTO_DATA_UIO &&
911 	    data->cd_format != CRYPTO_DATA_MBLK) {
912 		return (CRYPTO_ARGUMENTS_BAD);
913 	}
914 
915 	/*
916 	 * There must be no unprocessed ciphertext; data is left over
917 	 * only if the total input length was not a multiple of the
918 	 * AES block length.
919 	 */
920 	if (aes_ctx->ac_remainder_len > 0) {
921 		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
922 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
923 		else {
924 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
925 			    aes_encrypt_block);
926 			if (ret == CRYPTO_DATA_LEN_RANGE)
927 				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
928 			if (ret != CRYPTO_SUCCESS)
929 				return (ret);
930 		}
931 	}
932 
933 	if (aes_ctx->ac_flags & CCM_MODE) {
934 		/*
935 		 * All of the plaintext is returned here; make sure the
936 		 * plaintext buffer is big enough.
937 		 */
938 		size_t pt_len = aes_ctx->ac_data_len;
939 		if (data->cd_length < pt_len) {
940 			data->cd_length = pt_len;
941 			return (CRYPTO_BUFFER_TOO_SMALL);
942 		}
943 
944 		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
945 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
946 		saved_offset = data->cd_offset;
947 		saved_length = data->cd_length;
948 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
949 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
950 		    aes_xor_block);
951 		if (ret == CRYPTO_SUCCESS) {
952 			data->cd_length = data->cd_offset - saved_offset;
953 		} else {
954 			data->cd_length = saved_length;
955 		}
956 
957 		data->cd_offset = saved_offset;
958 		if (ret != CRYPTO_SUCCESS) {
959 			return (ret);
960 		}
961 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
962 		/*
963 		 * All of the plaintext is returned here; make sure the
964 		 * plaintext buffer is big enough.
965 		 */
966 		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
967 		size_t pt_len =
968 		    gcm_ctx->gcm_processed_data_len - gcm_ctx->gcm_tag_len;
969 		if (data->cd_length < pt_len) {
970 			data->cd_length = pt_len;
971 			return (CRYPTO_BUFFER_TOO_SMALL);
972 		}
973 
974 		saved_offset = data->cd_offset;
975 		saved_length = data->cd_length;
976 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
977 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
978 		if (ret == CRYPTO_SUCCESS) {
979 			data->cd_length = data->cd_offset - saved_offset;
980 		} else {
981 			data->cd_length = saved_length;
982 		}
983 
984 		data->cd_offset = saved_offset;
985 		if (ret != CRYPTO_SUCCESS) {
986 			return (ret);
987 		}
988 	}
989 
990 
991 	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
992 		data->cd_length = 0;
993 	}
994 
995 	(void) aes_free_context(ctx);
996 
997 /* EXPORT DELETE END */
998 
999 	return (CRYPTO_SUCCESS);
1000 }
1001 
1002 /* ARGSUSED */
1003 static int
1004 aes_encrypt_atomic(crypto_provider_handle_t provider,
1005     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1006     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1007     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1008 {
1009 	aes_ctx_t aes_ctx;	/* on the stack */
1010 	off_t saved_offset;
1011 	size_t saved_length;
1012 	size_t length_needed;
1013 	int ret;
1014 
1015 	AES_ARG_INPLACE(plaintext, ciphertext);
1016 
1017 	/*
1018 	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1019 	 * be a multiple of AES block size.
1020 	 */
1021 	switch (mechanism->cm_type) {
1022 	case AES_CTR_MECH_INFO_TYPE:
1023 	case AES_CCM_MECH_INFO_TYPE:
1024 	case AES_GCM_MECH_INFO_TYPE:
1025 	case AES_GMAC_MECH_INFO_TYPE:
1026 		break;
1027 	default:
1028 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1029 			return (CRYPTO_DATA_LEN_RANGE);
1030 	}
1031 
1032 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1033 		return (ret);
1034 
1035 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1036 
1037 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1038 	    crypto_kmflag(req), B_TRUE);
1039 	if (ret != CRYPTO_SUCCESS)
1040 		return (ret);
1041 
1042 	switch (mechanism->cm_type) {
1043 	case AES_CCM_MECH_INFO_TYPE:
1044 		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1045 		break;
1046 	case AES_GMAC_MECH_INFO_TYPE:
1047 		if (plaintext->cd_length != 0)
1048 			return (CRYPTO_ARGUMENTS_BAD);
1049 		/* FALLTHRU */
1050 	case AES_GCM_MECH_INFO_TYPE:
1051 		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1052 		break;
1053 	default:
1054 		length_needed = plaintext->cd_length;
1055 	}
1056 
1057 	/* return size of buffer needed to store output */
1058 	if (ciphertext->cd_length < length_needed) {
1059 		ciphertext->cd_length = length_needed;
1060 		ret = CRYPTO_BUFFER_TOO_SMALL;
1061 		goto out;
1062 	}
1063 
1064 	saved_offset = ciphertext->cd_offset;
1065 	saved_length = ciphertext->cd_length;
1066 
1067 	/*
1068 	 * Do an update on the specified input data.
1069 	 */
1070 	switch (plaintext->cd_format) {
1071 	case CRYPTO_DATA_RAW:
1072 		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1073 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1074 		break;
1075 	case CRYPTO_DATA_UIO:
1076 		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1077 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1078 		break;
1079 	case CRYPTO_DATA_MBLK:
1080 		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1081 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1082 		break;
1083 	default:
1084 		ret = CRYPTO_ARGUMENTS_BAD;
1085 	}
1086 
1087 	if (ret == CRYPTO_SUCCESS) {
1088 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1089 			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1090 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1091 			    aes_xor_block);
1092 			if (ret != CRYPTO_SUCCESS)
1093 				goto out;
1094 			ASSERT(aes_ctx.ac_remainder_len == 0);
1095 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1096 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1097 			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1098 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1099 			    aes_copy_block, aes_xor_block);
1100 			if (ret != CRYPTO_SUCCESS)
1101 				goto out;
1102 			ASSERT(aes_ctx.ac_remainder_len == 0);
1103 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1104 			if (aes_ctx.ac_remainder_len > 0) {
1105 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1106 				    ciphertext, aes_encrypt_block);
1107 				if (ret != CRYPTO_SUCCESS)
1108 					goto out;
1109 			}
1110 		} else {
1111 			ASSERT(aes_ctx.ac_remainder_len == 0);
1112 		}
1113 
1114 		if (plaintext != ciphertext) {
1115 			ciphertext->cd_length =
1116 			    ciphertext->cd_offset - saved_offset;
1117 		}
1118 	} else {
1119 		ciphertext->cd_length = saved_length;
1120 	}
1121 	ciphertext->cd_offset = saved_offset;
1122 
1123 out:
1124 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1125 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1126 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1127 	}
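	/*
	 * Note: the expanded key schedule is explicitly zeroed before being
	 * freed so that key material does not linger in freed kernel memory;
	 * aes_free_context() does the same for the multi-part entry points.
	 */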
1128 
1129 	return (ret);
1130 }
1131 
1132 /* ARGSUSED */
1133 static int
1134 aes_decrypt_atomic(crypto_provider_handle_t provider,
1135     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1136     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1137     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1138 {
1139 	aes_ctx_t aes_ctx;	/* on the stack */
1140 	off_t saved_offset;
1141 	size_t saved_length;
1142 	size_t length_needed;
1143 	int ret;
1144 
1145 	AES_ARG_INPLACE(ciphertext, plaintext);
1146 
1147 	/*
1148 	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1149 	 * be a multiple of AES block size.
1150 	 */
1151 	switch (mechanism->cm_type) {
1152 	case AES_CTR_MECH_INFO_TYPE:
1153 	case AES_CCM_MECH_INFO_TYPE:
1154 	case AES_GCM_MECH_INFO_TYPE:
1155 	case AES_GMAC_MECH_INFO_TYPE:
1156 		break;
1157 	default:
1158 		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1159 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1160 	}
1161 
1162 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1163 		return (ret);
1164 
1165 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1166 
1167 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1168 	    crypto_kmflag(req), B_FALSE);
1169 	if (ret != CRYPTO_SUCCESS)
1170 		return (ret);
1171 
1172 	switch (mechanism->cm_type) {
1173 	case AES_CCM_MECH_INFO_TYPE:
1174 		length_needed = aes_ctx.ac_data_len;
1175 		break;
1176 	case AES_GCM_MECH_INFO_TYPE:
1177 		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1178 		break;
1179 	case AES_GMAC_MECH_INFO_TYPE:
1180 		if (plaintext->cd_length != 0)
1181 			return (CRYPTO_ARGUMENTS_BAD);
1182 		length_needed = 0;
1183 		break;
1184 	default:
1185 		length_needed = ciphertext->cd_length;
1186 	}
1187 
1188 	/* return size of buffer needed to store output */
1189 	if (plaintext->cd_length < length_needed) {
1190 		plaintext->cd_length = length_needed;
1191 		ret = CRYPTO_BUFFER_TOO_SMALL;
1192 		goto out;
1193 	}
1194 
1195 	saved_offset = plaintext->cd_offset;
1196 	saved_length = plaintext->cd_length;
1197 
1198 	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1199 	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1200 		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1201 
1202 	/*
1203 	 * Do an update on the specified input data.
1204 	 */
1205 	switch (ciphertext->cd_format) {
1206 	case CRYPTO_DATA_RAW:
1207 		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1208 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1209 		break;
1210 	case CRYPTO_DATA_UIO:
1211 		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1212 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1213 		break;
1214 	case CRYPTO_DATA_MBLK:
1215 		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1216 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1217 		break;
1218 	default:
1219 		ret = CRYPTO_ARGUMENTS_BAD;
1220 	}
1221 
1222 	if (ret == CRYPTO_SUCCESS) {
1223 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1224 			ASSERT(aes_ctx.ac_processed_data_len
1225 			    == aes_ctx.ac_data_len);
1226 			ASSERT(aes_ctx.ac_processed_mac_len
1227 			    == aes_ctx.ac_mac_len);
1228 			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1229 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1230 			    aes_copy_block, aes_xor_block);
1231 			ASSERT(aes_ctx.ac_remainder_len == 0);
1232 			if ((ret == CRYPTO_SUCCESS) &&
1233 			    (ciphertext != plaintext)) {
1234 				plaintext->cd_length =
1235 				    plaintext->cd_offset - saved_offset;
1236 			} else {
1237 				plaintext->cd_length = saved_length;
1238 			}
1239 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1240 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1241 			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1242 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1243 			    aes_xor_block);
1244 			ASSERT(aes_ctx.ac_remainder_len == 0);
1245 			if ((ret == CRYPTO_SUCCESS) &&
1246 			    (ciphertext != plaintext)) {
1247 				plaintext->cd_length =
1248 				    plaintext->cd_offset - saved_offset;
1249 			} else {
1250 				plaintext->cd_length = saved_length;
1251 			}
1252 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1253 			ASSERT(aes_ctx.ac_remainder_len == 0);
1254 			if (ciphertext != plaintext)
1255 				plaintext->cd_length =
1256 				    plaintext->cd_offset - saved_offset;
1257 		} else {
1258 			if (aes_ctx.ac_remainder_len > 0) {
1259 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1260 				    plaintext, aes_encrypt_block);
1261 				if (ret == CRYPTO_DATA_LEN_RANGE)
1262 					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1263 				if (ret != CRYPTO_SUCCESS)
1264 					goto out;
1265 			}
1266 			if (ciphertext != plaintext)
1267 				plaintext->cd_length =
1268 				    plaintext->cd_offset - saved_offset;
1269 		}
1270 	} else {
1271 		plaintext->cd_length = saved_length;
1272 	}
1273 	plaintext->cd_offset = saved_offset;
1274 
1275 out:
1276 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1277 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1278 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1279 	}
1280 
1281 	if (aes_ctx.ac_flags & CCM_MODE) {
1282 		if (aes_ctx.ac_pt_buf != NULL) {
1283 			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1284 		}
1285 	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1286 		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1287 			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1288 			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1289 		}
1290 	}
1291 
1292 	return (ret);
1293 }
1294 
1295 /*
1296  * KCF software provider context template entry points.
1297  */
1298 /* ARGSUSED */
1299 static int
1300 aes_create_ctx_template(crypto_provider_handle_t provider,
1301     crypto_mechanism_t *mechanism, crypto_key_t *key,
1302     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1303 {
1304 
1305 /* EXPORT DELETE START */
1306 
1307 	void *keysched;
1308 	size_t size;
1309 	int rv;
1310 
1311 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1312 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1313 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1314 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1315 	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1316 	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1317 		return (CRYPTO_MECHANISM_INVALID);
1318 
1319 	if ((keysched = aes_alloc_keysched(&size,
1320 	    crypto_kmflag(req))) == NULL) {
1321 		return (CRYPTO_HOST_MEMORY);
1322 	}
1323 
1324 	/*
1325 	 * Initialize key schedule.  Key length information is stored
1326 	 * in the key.
1327 	 */
1328 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1329 		bzero(keysched, size);
1330 		kmem_free(keysched, size);
1331 		return (rv);
1332 	}
1333 
1334 	*tmpl = keysched;
1335 	*tmpl_size = size;
1336 
1337 /* EXPORT DELETE END */
1338 
1339 	return (CRYPTO_SUCCESS);
1340 }
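
/*
 * Note: the template produced above is simply the pre-expanded key
 * schedule.  When a caller passes it back in, aes_common_init_ctx() uses
 * it directly (see below), so the per-operation key expansion in
 * init_keysched() is skipped for repeated operations under the same key.
 */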
1341 
1342 
1343 static int
1344 aes_free_context(crypto_ctx_t *ctx)
1345 {
1346 
1347 /* EXPORT DELETE START */
1348 
1349 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1350 
1351 	if (aes_ctx != NULL) {
1352 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1353 			ASSERT(aes_ctx->ac_keysched_len != 0);
1354 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1355 			kmem_free(aes_ctx->ac_keysched,
1356 			    aes_ctx->ac_keysched_len);
1357 		}
1358 		crypto_free_mode_ctx(aes_ctx);
1359 		ctx->cc_provider_private = NULL;
1360 	}
1361 
1362 /* EXPORT DELETE END */
1363 
1364 	return (CRYPTO_SUCCESS);
1365 }
1366 
1367 
1368 static int
1369 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1370     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1371     boolean_t is_encrypt_init)
1372 {
1373 	int rv = CRYPTO_SUCCESS;
1374 
1375 /* EXPORT DELETE START */
1376 
1377 	void *keysched;
1378 	size_t size;
1379 
1380 	if (template == NULL) {
1381 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1382 			return (CRYPTO_HOST_MEMORY);
1383 		/*
1384 		 * Initialize key schedule.
1385 		 * Key length is stored in the key.
1386 		 */
1387 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1388 			kmem_free(keysched, size);
1389 			return (rv);
1390 		}
1391 
1392 		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1393 		aes_ctx->ac_keysched_len = size;
1394 	} else {
1395 		keysched = template;
1396 	}
1397 	aes_ctx->ac_keysched = keysched;
1398 
1399 	switch (mechanism->cm_type) {
1400 	case AES_CBC_MECH_INFO_TYPE:
1401 		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1402 		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1403 		break;
1404 	case AES_CTR_MECH_INFO_TYPE: {
1405 		CK_AES_CTR_PARAMS *pp;
1406 
1407 		if (mechanism->cm_param == NULL ||
1408 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1409 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1410 		}
1411 		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1412 		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1413 		    pp->cb, aes_copy_block);
1414 		break;
1415 	}
1416 	case AES_CCM_MECH_INFO_TYPE:
1417 		if (mechanism->cm_param == NULL ||
1418 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1419 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1420 		}
1421 		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1422 		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1423 		    aes_xor_block);
1424 		break;
1425 	case AES_GCM_MECH_INFO_TYPE:
1426 		if (mechanism->cm_param == NULL ||
1427 		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1428 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1429 		}
1430 		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1431 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1432 		    aes_xor_block);
1433 		break;
1434 	case AES_GMAC_MECH_INFO_TYPE:
1435 		if (mechanism->cm_param == NULL ||
1436 		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1437 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1438 		}
1439 		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1440 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1441 		    aes_xor_block);
1442 		break;
1443 	case AES_ECB_MECH_INFO_TYPE:
1444 		aes_ctx->ac_flags |= ECB_MODE;
1445 	}
1446 
1447 	if (rv != CRYPTO_SUCCESS) {
1448 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1449 			bzero(keysched, size);
1450 			kmem_free(keysched, size);
1451 		}
1452 	}
1453 
1454 /* EXPORT DELETE END */
1455 
1456 	return (rv);
1457 }
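
/*
 * Illustrative sketch (not part of the provider): one common way a caller
 * might lay out the CK_AES_CTR_PARAMS checked above, using a 12-byte
 * nonce followed by a 32-bit big-endian block counter (ulCounterBits =
 * 32).  The guard macro and helper name are hypothetical; the block is
 * compiled out by default.
 */
#ifdef	AES_EXAMPLE_CODE
static void
example_ctr_param_setup(crypto_mechanism_t *mech, CK_AES_CTR_PARAMS *ctrp,
    const uint8_t nonce[12])
{
	bzero(ctrp->cb, sizeof (ctrp->cb));
	bcopy(nonce, ctrp->cb, 12);	/* cb[12..15] hold the counter, = 0 */
	ctrp->ulCounterBits = 32;	/* only the low 32 bits increment */

	mech->cm_type = AES_CTR_MECH_INFO_TYPE;
	mech->cm_param = (char *)ctrp;
	mech->cm_param_len = sizeof (CK_AES_CTR_PARAMS);
}
#endif	/* AES_EXAMPLE_CODE */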
1458 
1459 static int
1460 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1461     CK_AES_GCM_PARAMS *gcm_params)
1462 {
1463 	/* LINTED: pointer alignment */
1464 	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1465 
1466 	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1467 		return (CRYPTO_MECHANISM_INVALID);
1468 
1469 	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1470 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1471 
1472 	if (params->pIv == NULL)
1473 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1474 
1475 	gcm_params->pIv = params->pIv;
1476 	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1477 	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1478 
1479 	if (data == NULL)
1480 		return (CRYPTO_SUCCESS);
1481 
1482 	if (data->cd_format != CRYPTO_DATA_RAW)
1483 		return (CRYPTO_ARGUMENTS_BAD);
1484 
1485 	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1486 	gcm_params->ulAADLen = data->cd_length;
1487 	return (CRYPTO_SUCCESS);
1488 }
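
/*
 * Note: GMAC is handled as a special case of GCM.  process_gmac_mech()
 * rewrites the GMAC parameters into CK_AES_GCM_PARAMS with the data to be
 * authenticated passed as AAD and with the fixed GMAC IV length and tag
 * size (AES_GMAC_IV_LEN, AES_GMAC_TAG_BITS).  aes_mac_atomic() and
 * aes_mac_verify_atomic() below then run a GCM encrypt/decrypt with
 * null_crypto_data as the payload, so the only output is the
 * authentication tag.
 */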
1489 
1490 static int
1491 aes_mac_atomic(crypto_provider_handle_t provider,
1492     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1493     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1494     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1495 {
1496 	CK_AES_GCM_PARAMS gcm_params;
1497 	crypto_mechanism_t gcm_mech;
1498 	int rv;
1499 
1500 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1501 	    != CRYPTO_SUCCESS)
1502 		return (rv);
1503 
1504 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1505 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1506 	gcm_mech.cm_param = (char *)&gcm_params;
1507 
1508 	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1509 	    key, &null_crypto_data, mac, template, req));
1510 }
1511 
1512 static int
1513 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1514     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1515     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1516     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1517 {
1518 	CK_AES_GCM_PARAMS gcm_params;
1519 	crypto_mechanism_t gcm_mech;
1520 	int rv;
1521 
1522 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1523 	    != CRYPTO_SUCCESS)
1524 		return (rv);
1525 
1526 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1527 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1528 	gcm_mech.cm_param = (char *)&gcm_params;
1529 
1530 	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1531 	    key, mac, &null_crypto_data, template, req));
1532 }
1533 
1534 /*
1535  * AES Power-Up Self-Test
1536  */
1537 static void
1538 aes_POST(int *rc)
1539 {
1540 
1541 	int ret;
1542 
1543 	/* AES Power-Up Self-Test for 128-bit key. */
1544 	ret = fips_aes_post(FIPS_AES_128_KEY_SIZE);
1545 
1546 	if (ret != CRYPTO_SUCCESS)
1547 		goto out;
1548 
1549 	/* AES Power-Up Self-Test for 192-bit key. */
1550 	ret = fips_aes_post(FIPS_AES_192_KEY_SIZE);
1551 
1552 	if (ret != CRYPTO_SUCCESS)
1553 		goto out;
1554 
1555 	/* AES Power-Up Self-Test for 256-bit key. */
1556 	ret = fips_aes_post(FIPS_AES_256_KEY_SIZE);
1557 
1558 out:
1559 	*rc = ret;
1560 
1561 }
1562