xref: /titanic_51/usr/src/uts/common/crypto/io/aes.c (revision f5c2e7ea56aaa46a9976476fb0cb1f02b9426f07)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23  */
24 
25 /*
26  * AES provider for the Kernel Cryptographic Framework (KCF)
27  */
28 
29 #include <sys/types.h>
30 #include <sys/systm.h>
31 #include <sys/modctl.h>
32 #include <sys/cmn_err.h>
33 #include <sys/ddi.h>
34 #include <sys/crypto/common.h>
35 #include <sys/crypto/impl.h>
36 #include <sys/crypto/spi.h>
37 #include <sys/sysmacros.h>
38 #include <sys/strsun.h>
39 #include <modes/modes.h>
40 #define	_AES_IMPL
41 #include <aes/aes_impl.h>
42 
43 extern struct mod_ops mod_cryptoops;
44 
45 /*
46  * Module linkage information for the kernel.
47  */
48 static struct modlcrypto modlcrypto = {
49 	&mod_cryptoops,
50 	"AES Kernel SW Provider"
51 };
52 
53 static struct modlinkage modlinkage = {
54 	MODREV_1,
55 	(void *)&modlcrypto,
56 	NULL
57 };
58 
59 /*
60  * The following definitions are to keep EXPORT_SRC happy.
61  */
62 #ifndef AES_MIN_KEY_BYTES
63 #define	AES_MIN_KEY_BYTES		0
64 #endif
65 
66 #ifndef AES_MAX_KEY_BYTES
67 #define	AES_MAX_KEY_BYTES		0
68 #endif
69 
70 /*
71  * Mechanism info structure passed to KCF during registration.
72  */
73 static crypto_mech_info_t aes_mech_info_tab[] = {
74 	/* AES_ECB */
75 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
76 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
77 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
78 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
79 	/* AES_CBC */
80 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
81 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
82 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
83 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
84 	/* AES_CTR */
85 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
86 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
87 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
88 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
89 	/* AES_CCM */
90 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
91 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
92 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
93 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
94 	/* AES_GCM */
95 	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
96 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
97 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
98 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 	/* AES_GMAC */
100 	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
101 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
102 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
103 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
104 	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
105 	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
106 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
107 };
108 
109 /* operations are in-place if the output buffer is NULL */
110 #define	AES_ARG_INPLACE(input, output)				\
111 	if ((output) == NULL)					\
112 		(output) = (input);
113 
114 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
115 
116 static crypto_control_ops_t aes_control_ops = {
117 	aes_provider_status
118 };
119 
120 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
121     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
122 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
123     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
124 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
125     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
126 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
127     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
128 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
129     crypto_req_handle_t);
130 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
131     crypto_req_handle_t);
132 
133 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
134     crypto_req_handle_t);
135 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
136     crypto_data_t *, crypto_req_handle_t);
137 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
138     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
139     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
140 
141 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
142     crypto_req_handle_t);
143 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
144     crypto_data_t *, crypto_req_handle_t);
145 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
146     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
147     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
148 
149 static crypto_cipher_ops_t aes_cipher_ops = {
150 	aes_encrypt_init,
151 	aes_encrypt,
152 	aes_encrypt_update,
153 	aes_encrypt_final,
154 	aes_encrypt_atomic,
155 	aes_decrypt_init,
156 	aes_decrypt,
157 	aes_decrypt_update,
158 	aes_decrypt_final,
159 	aes_decrypt_atomic
160 };
161 
162 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
163     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
164     crypto_spi_ctx_template_t, crypto_req_handle_t);
165 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
166     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
167     crypto_spi_ctx_template_t, crypto_req_handle_t);
168 
169 static crypto_mac_ops_t aes_mac_ops = {
170 	NULL,
171 	NULL,
172 	NULL,
173 	NULL,
174 	aes_mac_atomic,
175 	aes_mac_verify_atomic
176 };
177 
178 static int aes_create_ctx_template(crypto_provider_handle_t,
179     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
180     size_t *, crypto_req_handle_t);
181 static int aes_free_context(crypto_ctx_t *);
182 
183 static crypto_ctx_ops_t aes_ctx_ops = {
184 	aes_create_ctx_template,
185 	aes_free_context
186 };
187 
188 static crypto_ops_t aes_crypto_ops = {
189 	&aes_control_ops,
190 	NULL,
191 	&aes_cipher_ops,
192 	&aes_mac_ops,
193 	NULL,
194 	NULL,
195 	NULL,
196 	NULL,
197 	NULL,
198 	NULL,
199 	NULL,
200 	NULL,
201 	NULL,
202 	&aes_ctx_ops,
203 	NULL,
204 	NULL,
205 	NULL,
206 };
207 
208 static crypto_provider_info_t aes_prov_info = {
209 	CRYPTO_SPI_VERSION_4,
210 	"AES Software Provider",
211 	CRYPTO_SW_PROVIDER,
212 	{&modlinkage},
213 	NULL,
214 	&aes_crypto_ops,
215 	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
216 	aes_mech_info_tab
217 };
218 
219 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
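/* zero-length crypto_data_t passed as the GMAC plaintext/ciphertext */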
220 static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
221 
222 int
223 _init(void)
224 {
225 	int ret;
226 
227 	if ((ret = mod_install(&modlinkage)) != 0)
228 		return (ret);
229 
230 	/* Register with KCF.  If the registration fails, remove the module. */
231 	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
232 		(void) mod_remove(&modlinkage);
233 		return (EACCES);
234 	}
235 
236 	return (0);
237 }
238 
239 int
240 _fini(void)
241 {
242 	/* Unregister from KCF if module is registered */
243 	if (aes_prov_handle != NULL) {
244 		if (crypto_unregister_provider(aes_prov_handle))
245 			return (EBUSY);
246 
247 		aes_prov_handle = NULL;
248 	}
249 
250 	return (mod_remove(&modlinkage));
251 }
252 
253 int
254 _info(struct modinfo *modinfop)
255 {
256 	return (mod_info(&modlinkage, modinfop));
257 }
258 
259 
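/*
 * Validate the mechanism parameter length for the given AES mechanism
 * type and, when ctx is non-NULL, allocate the matching mode-specific
 * context using the caller's kmem flag.
 */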
260 static int
261 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
262 {
263 	void *p = NULL;
264 	boolean_t param_required = B_TRUE;
265 	size_t param_len;
266 	void *(*alloc_fun)(int);
267 	int rv = CRYPTO_SUCCESS;
268 
269 	switch (mechanism->cm_type) {
270 	case AES_ECB_MECH_INFO_TYPE:
271 		param_required = B_FALSE;
272 		alloc_fun = ecb_alloc_ctx;
273 		break;
274 	case AES_CBC_MECH_INFO_TYPE:
275 		param_len = AES_BLOCK_LEN;
276 		alloc_fun = cbc_alloc_ctx;
277 		break;
278 	case AES_CTR_MECH_INFO_TYPE:
279 		param_len = sizeof (CK_AES_CTR_PARAMS);
280 		alloc_fun = ctr_alloc_ctx;
281 		break;
282 	case AES_CCM_MECH_INFO_TYPE:
283 		param_len = sizeof (CK_AES_CCM_PARAMS);
284 		alloc_fun = ccm_alloc_ctx;
285 		break;
286 	case AES_GCM_MECH_INFO_TYPE:
287 		param_len = sizeof (CK_AES_GCM_PARAMS);
288 		alloc_fun = gcm_alloc_ctx;
289 		break;
290 	case AES_GMAC_MECH_INFO_TYPE:
291 		param_len = sizeof (CK_AES_GMAC_PARAMS);
292 		alloc_fun = gmac_alloc_ctx;
293 		break;
294 	default:
295 		rv = CRYPTO_MECHANISM_INVALID;
296 		return (rv);
297 	}
298 	if (param_required && mechanism->cm_param != NULL &&
299 	    mechanism->cm_param_len != param_len) {
300 		rv = CRYPTO_MECHANISM_PARAM_INVALID;
301 	}
302 	if (rv == CRYPTO_SUCCESS && ctx != NULL) {
303 		p = (alloc_fun)(kmflag);
		if (p == NULL)
			return (CRYPTO_HOST_MEMORY);
304 		*ctx = p;
305 	}
306 	return (rv);
307 }
308 
309 /* EXPORT DELETE START */
310 
311 /*
312  * Initialize key schedules for AES
313  */
314 static int
315 init_keysched(crypto_key_t *key, void *newbie)
316 {
317 	/*
318 	 * Only keys by value are supported by this module.
319 	 */
320 	switch (key->ck_format) {
321 	case CRYPTO_KEY_RAW:
322 		if (key->ck_length < AES_MINBITS ||
323 		    key->ck_length > AES_MAXBITS) {
324 			return (CRYPTO_KEY_SIZE_RANGE);
325 		}
326 
327 		/* key length must be 128, 192, or 256 bits */
328 		if ((key->ck_length & 63) != 0)
329 			return (CRYPTO_KEY_SIZE_RANGE);
330 		break;
331 	default:
332 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
333 	}
334 
335 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
336 	return (CRYPTO_SUCCESS);
337 }
338 
339 /* EXPORT DELETE END */
340 
341 /*
342  * KCF software provider control entry points.
343  */
344 /* ARGSUSED */
345 static void
346 aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
347 {
348 	*status = CRYPTO_PROVIDER_READY;
349 }
350 
351 static int
352 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
353     crypto_key_t *key, crypto_spi_ctx_template_t template,
354     crypto_req_handle_t req)
{
355 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
356 }
357 
358 static int
359 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
360     crypto_key_t *key, crypto_spi_ctx_template_t template,
361     crypto_req_handle_t req)
{
362 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
363 }
364 
365 
366 
367 /*
368  * Common initialization shared by the encrypt and decrypt entry points.
369  */
370 static int
371 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
372     crypto_key_t *key, crypto_spi_ctx_template_t template,
373     crypto_req_handle_t req, boolean_t is_encrypt_init)
374 {
375 
376 /* EXPORT DELETE START */
377 
378 	aes_ctx_t *aes_ctx;
379 	int rv;
380 	int kmflag;
381 
382 	/*
383 	 * Only keys by value are supported by this module.
384 	 */
385 	if (key->ck_format != CRYPTO_KEY_RAW) {
386 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
387 	}
388 
389 	kmflag = crypto_kmflag(req);
390 	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
391 	    != CRYPTO_SUCCESS)
392 		return (rv);
393 
394 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
395 	    is_encrypt_init);
396 	if (rv != CRYPTO_SUCCESS) {
397 		crypto_free_mode_ctx(aes_ctx);
398 		return (rv);
399 	}
400 
401 	ctx->cc_provider_private = aes_ctx;
402 
403 /* EXPORT DELETE END */
404 
405 	return (CRYPTO_SUCCESS);
406 }
407 
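/*
 * Copy a 16-byte AES block into two 64-bit words, using aligned
 * 64-bit loads when possible and a byte-wise copy otherwise.
 */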
408 static void
409 aes_copy_block64(uint8_t *in, uint64_t *out)
410 {
411 	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
412 		/* LINTED: pointer alignment */
413 		out[0] = *(uint64_t *)&in[0];
414 		/* LINTED: pointer alignment */
415 		out[1] = *(uint64_t *)&in[8];
416 	} else {
417 		uint8_t *iv8 = (uint8_t *)&out[0];
418 
419 		AES_COPY_BLOCK(in, iv8);
420 	}
421 }
422 
423 
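/*
 * KCF single-part encrypt entry point.  The encryption is done as one
 * update followed by the mode-specific final processing; the context
 * is released before returning.
 */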
424 static int
425 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
426     crypto_data_t *ciphertext, crypto_req_handle_t req)
427 {
428 	int ret = CRYPTO_FAILED;
429 
430 /* EXPORT DELETE START */
431 
432 	aes_ctx_t *aes_ctx;
433 	size_t saved_length, saved_offset, length_needed;
434 
435 	ASSERT(ctx->cc_provider_private != NULL);
436 	aes_ctx = ctx->cc_provider_private;
437 
438 	/*
439 	 * For block ciphers, plaintext must be a multiple of AES block size.
440 	 * This test is only valid for ciphers whose blocksize is a power of 2.
441 	 */
442 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
443 	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
444 		return (CRYPTO_DATA_LEN_RANGE);
445 
446 	AES_ARG_INPLACE(plaintext, ciphertext);
447 
448 	/*
449 	 * Return the length needed to store the output.
450 	 * Do not destroy the context when the output buffer is too small.
451 	 */
452 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
453 	case CCM_MODE:
454 		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
455 		break;
456 	case GCM_MODE:
457 		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
458 		break;
459 	case GMAC_MODE:
460 		if (plaintext->cd_length != 0)
461 			return (CRYPTO_ARGUMENTS_BAD);
462 
463 		length_needed = aes_ctx->ac_tag_len;
464 		break;
465 	default:
466 		length_needed = plaintext->cd_length;
467 	}
468 
469 	if (ciphertext->cd_length < length_needed) {
470 		ciphertext->cd_length = length_needed;
471 		return (CRYPTO_BUFFER_TOO_SMALL);
472 	}
473 
474 	saved_length = ciphertext->cd_length;
475 	saved_offset = ciphertext->cd_offset;
476 
477 	/*
478 	 * Do an update on the specified input data.
479 	 */
480 	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
481 	if (ret != CRYPTO_SUCCESS) {
482 		return (ret);
483 	}
484 
485 	/*
486 	 * For CCM mode, ccm_encrypt_final() will take care of any
487 	 * leftover unprocessed data and compute the MAC.
488 	 */
489 	if (aes_ctx->ac_flags & CCM_MODE) {
490 		/*
491 		 * ccm_encrypt_final() computes the MAC and appends it to the
492 		 * existing ciphertext, so adjust the remaining length and
493 		 * offset accordingly.
494 		 */
495 
496 		/* order of following 2 lines MUST not be reversed */
497 		ciphertext->cd_offset = ciphertext->cd_length;
498 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
499 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
500 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
501 		if (ret != CRYPTO_SUCCESS) {
502 			return (ret);
503 		}
504 
505 		if (plaintext != ciphertext) {
506 			ciphertext->cd_length =
507 			    ciphertext->cd_offset - saved_offset;
508 		}
509 		ciphertext->cd_offset = saved_offset;
510 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
511 		/*
512 		 * gcm_encrypt_final() computes the MAC and appends it to the
513 		 * existing ciphertext, so adjust the remaining length and
514 		 * offset accordingly.
515 		 */
516 
517 		/* order of following 2 lines MUST not be reversed */
518 		ciphertext->cd_offset = ciphertext->cd_length;
519 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
520 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
521 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
522 		    aes_xor_block);
523 		if (ret != CRYPTO_SUCCESS) {
524 			return (ret);
525 		}
526 
527 		if (plaintext != ciphertext) {
528 			ciphertext->cd_length =
529 			    ciphertext->cd_offset - saved_offset;
530 		}
531 		ciphertext->cd_offset = saved_offset;
532 	}
533 
534 	ASSERT(aes_ctx->ac_remainder_len == 0);
535 	(void) aes_free_context(ctx);
536 
537 /* EXPORT DELETE END */
538 
539 	return (ret);
540 }
541 
542 
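/*
 * KCF single-part decrypt entry point.  The decryption is done as one
 * update followed by the mode-specific final processing; the context
 * is released before returning.
 */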
543 static int
544 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
545     crypto_data_t *plaintext, crypto_req_handle_t req)
546 {
547 	int ret = CRYPTO_FAILED;
548 
549 /* EXPORT DELETE START */
550 
551 	aes_ctx_t *aes_ctx;
552 	off_t saved_offset;
553 	size_t saved_length, length_needed;
554 
555 	ASSERT(ctx->cc_provider_private != NULL);
556 	aes_ctx = ctx->cc_provider_private;
557 
558 	/*
559 	 * For block ciphers, plaintext must be a multiple of AES block size.
560 	 * This test is only valid for ciphers whose blocksize is a power of 2.
561 	 */
562 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
563 	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
564 		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
565 	}
566 
567 	AES_ARG_INPLACE(ciphertext, plaintext);
568 
569 	/*
570 	 * Return length needed to store the output.
571 	 * Do not destroy context when plaintext buffer is too small.
572 	 *
573 	 * CCM:  plaintext is MAC len smaller than cipher text
574 	 * GCM:  plaintext is TAG len smaller than cipher text
575 	 * GMAC: plaintext length must be zero
576 	 */
577 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
578 	case CCM_MODE:
579 		length_needed = aes_ctx->ac_processed_data_len;
580 		break;
581 	case GCM_MODE:
582 		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
583 		break;
584 	case GMAC_MODE:
585 		if (plaintext->cd_length != 0)
586 			return (CRYPTO_ARGUMENTS_BAD);
587 
588 		length_needed = 0;
589 		break;
590 	default:
591 		length_needed = ciphertext->cd_length;
592 	}
593 
594 	if (plaintext->cd_length < length_needed) {
595 		plaintext->cd_length = length_needed;
596 		return (CRYPTO_BUFFER_TOO_SMALL);
597 	}
598 
599 	saved_offset = plaintext->cd_offset;
600 	saved_length = plaintext->cd_length;
601 
602 	/*
603 	 * Do an update on the specified input data.
604 	 */
605 	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
606 	if (ret != CRYPTO_SUCCESS) {
607 		goto cleanup;
608 	}
609 
610 	if (aes_ctx->ac_flags & CCM_MODE) {
611 		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
612 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
613 
614 		/* order of following 2 lines MUST not be reversed */
615 		plaintext->cd_offset = plaintext->cd_length;
616 		plaintext->cd_length = saved_length - plaintext->cd_length;
617 
618 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
619 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
620 		    aes_xor_block);
621 		if (ret == CRYPTO_SUCCESS) {
622 			if (plaintext != ciphertext) {
623 				plaintext->cd_length =
624 				    plaintext->cd_offset - saved_offset;
625 			}
626 		} else {
627 			plaintext->cd_length = saved_length;
628 		}
629 
630 		plaintext->cd_offset = saved_offset;
631 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
632 		/* order of following 2 lines MUST not be reversed */
633 		plaintext->cd_offset = plaintext->cd_length;
634 		plaintext->cd_length = saved_length - plaintext->cd_length;
635 
636 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
637 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
638 		if (ret == CRYPTO_SUCCESS) {
639 			if (plaintext != ciphertext) {
640 				plaintext->cd_length =
641 				    plaintext->cd_offset - saved_offset;
642 			}
643 		} else {
644 			plaintext->cd_length = saved_length;
645 		}
646 
647 		plaintext->cd_offset = saved_offset;
648 	}
649 
650 	ASSERT(aes_ctx->ac_remainder_len == 0);
651 
652 cleanup:
653 	(void) aes_free_context(ctx);
654 
655 /* EXPORT DELETE END */
656 
657 	return (ret);
658 }
659 
660 
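/*
 * KCF multi-part encrypt entry point.  Encrypts as many whole AES
 * blocks as the input provides (plus any CTR-mode remainder) and
 * buffers the rest in the context for later calls.
 */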
661 /* ARGSUSED */
662 static int
663 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
664     crypto_data_t *ciphertext, crypto_req_handle_t req)
665 {
666 	off_t saved_offset;
667 	size_t saved_length, out_len;
668 	int ret = CRYPTO_SUCCESS;
669 	aes_ctx_t *aes_ctx;
670 
671 	ASSERT(ctx->cc_provider_private != NULL);
672 	aes_ctx = ctx->cc_provider_private;
673 
674 	AES_ARG_INPLACE(plaintext, ciphertext);
675 
676 	/* compute number of bytes that will hold the ciphertext */
677 	out_len = aes_ctx->ac_remainder_len;
678 	out_len += plaintext->cd_length;
679 	out_len &= ~(AES_BLOCK_LEN - 1);
680 
681 	/* return length needed to store the output */
682 	if (ciphertext->cd_length < out_len) {
683 		ciphertext->cd_length = out_len;
684 		return (CRYPTO_BUFFER_TOO_SMALL);
685 	}
686 
687 	saved_offset = ciphertext->cd_offset;
688 	saved_length = ciphertext->cd_length;
689 
690 	/*
691 	 * Do the AES update on the specified input data.
692 	 */
693 	switch (plaintext->cd_format) {
694 	case CRYPTO_DATA_RAW:
695 		ret = crypto_update_iov(ctx->cc_provider_private,
696 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
697 		    aes_copy_block64);
698 		break;
699 	case CRYPTO_DATA_UIO:
700 		ret = crypto_update_uio(ctx->cc_provider_private,
701 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
702 		    aes_copy_block64);
703 		break;
704 	case CRYPTO_DATA_MBLK:
705 		ret = crypto_update_mp(ctx->cc_provider_private,
706 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
707 		    aes_copy_block64);
708 		break;
709 	default:
710 		ret = CRYPTO_ARGUMENTS_BAD;
711 	}
712 
713 	/*
714 	 * Since AES counter mode is a stream cipher, we call
715 	 * ctr_mode_final() to pick up any remaining bytes.
716 	 * It is an internal function that does not destroy
717 	 * the context like *normal* final routines.
718 	 */
719 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
720 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
721 		    ciphertext, aes_encrypt_block);
722 	}
723 
724 	if (ret == CRYPTO_SUCCESS) {
725 		if (plaintext != ciphertext)
726 			ciphertext->cd_length =
727 			    ciphertext->cd_offset - saved_offset;
728 	} else {
729 		ciphertext->cd_length = saved_length;
730 	}
731 	ciphertext->cd_offset = saved_offset;
732 
733 	return (ret);
734 }
735 
736 
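/*
 * KCF multi-part decrypt entry point.  CCM, GCM, and GMAC only
 * accumulate input here and return no plaintext until the final call;
 * the other modes decrypt whole AES blocks as they arrive.
 */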
737 static int
738 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
739     crypto_data_t *plaintext, crypto_req_handle_t req)
740 {
741 	off_t saved_offset;
742 	size_t saved_length, out_len;
743 	int ret = CRYPTO_SUCCESS;
744 	aes_ctx_t *aes_ctx;
745 
746 	ASSERT(ctx->cc_provider_private != NULL);
747 	aes_ctx = ctx->cc_provider_private;
748 
749 	AES_ARG_INPLACE(ciphertext, plaintext);
750 
751 	/*
752 	 * Compute number of bytes that will hold the plaintext.
753 	 * This is not necessary for CCM, GCM, and GMAC since these
754 	 * mechanisms never return plaintext for update operations.
755 	 */
756 	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
757 		out_len = aes_ctx->ac_remainder_len;
758 		out_len += ciphertext->cd_length;
759 		out_len &= ~(AES_BLOCK_LEN - 1);
760 
761 		/* return length needed to store the output */
762 		if (plaintext->cd_length < out_len) {
763 			plaintext->cd_length = out_len;
764 			return (CRYPTO_BUFFER_TOO_SMALL);
765 		}
766 	}
767 
768 	saved_offset = plaintext->cd_offset;
769 	saved_length = plaintext->cd_length;
770 
771 	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
772 		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));
773 
774 	/*
775 	 * Do the AES update on the specified input data.
776 	 */
777 	switch (ciphertext->cd_format) {
778 	case CRYPTO_DATA_RAW:
779 		ret = crypto_update_iov(ctx->cc_provider_private,
780 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
781 		    aes_copy_block64);
782 		break;
783 	case CRYPTO_DATA_UIO:
784 		ret = crypto_update_uio(ctx->cc_provider_private,
785 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
786 		    aes_copy_block64);
787 		break;
788 	case CRYPTO_DATA_MBLK:
789 		ret = crypto_update_mp(ctx->cc_provider_private,
790 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
791 		    aes_copy_block64);
792 		break;
793 	default:
794 		ret = CRYPTO_ARGUMENTS_BAD;
795 	}
796 
797 	/*
798 	 * Since AES counter mode is a stream cipher, we call
799 	 * ctr_mode_final() to pick up any remaining bytes.
800 	 * It is an internal function that does not destroy
801 	 * the context like *normal* final routines.
802 	 */
803 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
804 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
805 		    aes_encrypt_block);
806 		if (ret == CRYPTO_DATA_LEN_RANGE)
807 			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
808 	}
809 
810 	if (ret == CRYPTO_SUCCESS) {
811 		if (ciphertext != plaintext)
812 			plaintext->cd_length =
813 			    plaintext->cd_offset - saved_offset;
814 	} else {
815 		plaintext->cd_length = saved_length;
816 	}
817 	plaintext->cd_offset = saved_offset;
818 
819 
820 	return (ret);
821 }
822 
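/*
 * KCF multi-part encrypt final entry point.  Flushes any CTR-mode
 * remainder or emits the CCM/GCM/GMAC authentication tag, then
 * releases the context.
 */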
823 /* ARGSUSED */
824 static int
825 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
826     crypto_req_handle_t req)
827 {
828 
829 /* EXPORT DELETE START */
830 
831 	aes_ctx_t *aes_ctx;
832 	int ret;
833 
834 	ASSERT(ctx->cc_provider_private != NULL);
835 	aes_ctx = ctx->cc_provider_private;
836 
837 	if (data->cd_format != CRYPTO_DATA_RAW &&
838 	    data->cd_format != CRYPTO_DATA_UIO &&
839 	    data->cd_format != CRYPTO_DATA_MBLK) {
840 		return (CRYPTO_ARGUMENTS_BAD);
841 	}
842 
843 	if (aes_ctx->ac_flags & CTR_MODE) {
844 		if (aes_ctx->ac_remainder_len > 0) {
845 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
846 			    aes_encrypt_block);
847 			if (ret != CRYPTO_SUCCESS)
848 				return (ret);
849 		}
850 	} else if (aes_ctx->ac_flags & CCM_MODE) {
851 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
852 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
853 		if (ret != CRYPTO_SUCCESS) {
854 			return (ret);
855 		}
856 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
857 		size_t saved_offset = data->cd_offset;
858 
859 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
860 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
861 		    aes_xor_block);
862 		if (ret != CRYPTO_SUCCESS) {
863 			return (ret);
864 		}
865 		data->cd_length = data->cd_offset - saved_offset;
866 		data->cd_offset = saved_offset;
867 	} else {
868 		/*
869 		 * There must be no unprocessed plaintext; a remainder is
870 		 * left over if the length of the last update was not a
871 		 * multiple of the AES block length.
872 		 */
873 		if (aes_ctx->ac_remainder_len > 0) {
874 			return (CRYPTO_DATA_LEN_RANGE);
875 		}
876 		data->cd_length = 0;
877 	}
878 
879 	(void) aes_free_context(ctx);
880 
881 /* EXPORT DELETE END */
882 
883 	return (CRYPTO_SUCCESS);
884 }
885 
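/*
 * KCF multi-part decrypt final entry point.  For CCM, GCM, and GMAC
 * this verifies the authentication tag and returns the buffered
 * plaintext; the context is released in all cases.
 */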
886 /* ARGSUSED */
887 static int
888 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
889     crypto_req_handle_t req)
890 {
891 
892 /* EXPORT DELETE START */
893 
894 	aes_ctx_t *aes_ctx;
895 	int ret;
896 	off_t saved_offset;
897 	size_t saved_length;
898 
899 	ASSERT(ctx->cc_provider_private != NULL);
900 	aes_ctx = ctx->cc_provider_private;
901 
902 	if (data->cd_format != CRYPTO_DATA_RAW &&
903 	    data->cd_format != CRYPTO_DATA_UIO &&
904 	    data->cd_format != CRYPTO_DATA_MBLK) {
905 		return (CRYPTO_ARGUMENTS_BAD);
906 	}
907 
908 	/*
909 	 * Unprocessed ciphertext is allowed only in CTR mode; a remainder
910 	 * is left over if the length of the last update was not a
911 	 * multiple of the AES block length.
912 	 */
913 	if (aes_ctx->ac_remainder_len > 0) {
914 		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
915 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
916 		else {
917 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
918 			    aes_encrypt_block);
919 			if (ret == CRYPTO_DATA_LEN_RANGE)
920 				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
921 			if (ret != CRYPTO_SUCCESS)
922 				return (ret);
923 		}
924 	}
925 
926 	if (aes_ctx->ac_flags & CCM_MODE) {
927 		/*
928 		 * All of the plaintext is returned here, so make sure the
929 		 * plaintext buffer is big enough.
930 		 */
931 		size_t pt_len = aes_ctx->ac_data_len;
932 		if (data->cd_length < pt_len) {
933 			data->cd_length = pt_len;
934 			return (CRYPTO_BUFFER_TOO_SMALL);
935 		}
936 
937 		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
938 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
939 		saved_offset = data->cd_offset;
940 		saved_length = data->cd_length;
941 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
942 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
943 		    aes_xor_block);
944 		if (ret == CRYPTO_SUCCESS) {
945 			data->cd_length = data->cd_offset - saved_offset;
946 		} else {
947 			data->cd_length = saved_length;
948 		}
949 
950 		data->cd_offset = saved_offset;
951 		if (ret != CRYPTO_SUCCESS) {
952 			return (ret);
953 		}
954 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
955 		/*
956 		 * All of the plaintext is returned here, so make sure the
957 		 * plaintext buffer is big enough.
958 		 */
959 		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
960 		size_t pt_len = gcm_ctx->gcm_processed_data_len -
		    gcm_ctx->gcm_tag_len;
961 
962 		if (data->cd_length < pt_len) {
963 			data->cd_length = pt_len;
964 			return (CRYPTO_BUFFER_TOO_SMALL);
965 		}
966 
967 		saved_offset = data->cd_offset;
968 		saved_length = data->cd_length;
969 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
970 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
971 		if (ret == CRYPTO_SUCCESS) {
972 			data->cd_length = data->cd_offset - saved_offset;
973 		} else {
974 			data->cd_length = saved_length;
975 		}
976 
977 		data->cd_offset = saved_offset;
978 		if (ret != CRYPTO_SUCCESS) {
979 			return (ret);
980 		}
981 	}
982 
983 
984 	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
985 		data->cd_length = 0;
986 	}
987 
988 	(void) aes_free_context(ctx);
989 
990 /* EXPORT DELETE END */
991 
992 	return (CRYPTO_SUCCESS);
993 }
994 
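/*
 * KCF atomic (single-call) encrypt entry point.  Uses an aes_ctx_t on
 * the stack, so no framework context is created or destroyed.
 */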
995 /* ARGSUSED */
996 static int
997 aes_encrypt_atomic(crypto_provider_handle_t provider,
998     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
999     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1000     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1001 {
1002 	aes_ctx_t aes_ctx;	/* on the stack */
1003 	off_t saved_offset;
1004 	size_t saved_length;
1005 	size_t length_needed;
1006 	int ret;
1007 
1008 	AES_ARG_INPLACE(plaintext, ciphertext);
1009 
1010 	/*
1011 	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1012 	 * be a multiple of AES block size.
1013 	 */
1014 	switch (mechanism->cm_type) {
1015 	case AES_CTR_MECH_INFO_TYPE:
1016 	case AES_CCM_MECH_INFO_TYPE:
1017 	case AES_GCM_MECH_INFO_TYPE:
1018 	case AES_GMAC_MECH_INFO_TYPE:
1019 		break;
1020 	default:
1021 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1022 			return (CRYPTO_DATA_LEN_RANGE);
1023 	}
1024 
1025 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1026 		return (ret);
1027 
1028 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1029 
1030 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1031 	    crypto_kmflag(req), B_TRUE);
1032 	if (ret != CRYPTO_SUCCESS)
1033 		return (ret);
1034 
1035 	switch (mechanism->cm_type) {
1036 	case AES_CCM_MECH_INFO_TYPE:
1037 		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1038 		break;
1039 	case AES_GMAC_MECH_INFO_TYPE:
1040 		if (plaintext->cd_length != 0) {
			/* go through out: so the key schedule is freed */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
1042 		/* FALLTHRU */
1043 	case AES_GCM_MECH_INFO_TYPE:
1044 		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1045 		break;
1046 	default:
1047 		length_needed = plaintext->cd_length;
1048 	}
1049 
1050 	/* return size of buffer needed to store output */
1051 	if (ciphertext->cd_length < length_needed) {
1052 		ciphertext->cd_length = length_needed;
1053 		ret = CRYPTO_BUFFER_TOO_SMALL;
1054 		goto out;
1055 	}
1056 
1057 	saved_offset = ciphertext->cd_offset;
1058 	saved_length = ciphertext->cd_length;
1059 
1060 	/*
1061 	 * Do an update on the specified input data.
1062 	 */
1063 	switch (plaintext->cd_format) {
1064 	case CRYPTO_DATA_RAW:
1065 		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1066 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1067 		break;
1068 	case CRYPTO_DATA_UIO:
1069 		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1070 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1071 		break;
1072 	case CRYPTO_DATA_MBLK:
1073 		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1074 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1075 		break;
1076 	default:
1077 		ret = CRYPTO_ARGUMENTS_BAD;
1078 	}
1079 
1080 	if (ret == CRYPTO_SUCCESS) {
1081 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1082 			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1083 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1084 			    aes_xor_block);
1085 			if (ret != CRYPTO_SUCCESS)
1086 				goto out;
1087 			ASSERT(aes_ctx.ac_remainder_len == 0);
1088 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1089 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1090 			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1091 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1092 			    aes_copy_block, aes_xor_block);
1093 			if (ret != CRYPTO_SUCCESS)
1094 				goto out;
1095 			ASSERT(aes_ctx.ac_remainder_len == 0);
1096 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1097 			if (aes_ctx.ac_remainder_len > 0) {
1098 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1099 				    ciphertext, aes_encrypt_block);
1100 				if (ret != CRYPTO_SUCCESS)
1101 					goto out;
1102 			}
1103 		} else {
1104 			ASSERT(aes_ctx.ac_remainder_len == 0);
1105 		}
1106 
1107 		if (plaintext != ciphertext) {
1108 			ciphertext->cd_length =
1109 			    ciphertext->cd_offset - saved_offset;
1110 		}
1111 	} else {
1112 		ciphertext->cd_length = saved_length;
1113 	}
1114 	ciphertext->cd_offset = saved_offset;
1115 
1116 out:
1117 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1118 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1119 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1120 	}
1121 
1122 	return (ret);
1123 }
1124 
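/*
 * KCF atomic (single-call) decrypt entry point.  Uses an aes_ctx_t on
 * the stack, so no framework context is created or destroyed.
 */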
1125 /* ARGSUSED */
1126 static int
1127 aes_decrypt_atomic(crypto_provider_handle_t provider,
1128     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1129     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1130     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1131 {
1132 	aes_ctx_t aes_ctx;	/* on the stack */
1133 	off_t saved_offset;
1134 	size_t saved_length;
1135 	size_t length_needed;
1136 	int ret;
1137 
1138 	AES_ARG_INPLACE(ciphertext, plaintext);
1139 
1140 	/*
1141 	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1142 	 * be a multiple of AES block size.
1143 	 */
1144 	switch (mechanism->cm_type) {
1145 	case AES_CTR_MECH_INFO_TYPE:
1146 	case AES_CCM_MECH_INFO_TYPE:
1147 	case AES_GCM_MECH_INFO_TYPE:
1148 	case AES_GMAC_MECH_INFO_TYPE:
1149 		break;
1150 	default:
1151 		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1152 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1153 	}
1154 
1155 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1156 		return (ret);
1157 
1158 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1159 
1160 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1161 	    crypto_kmflag(req), B_FALSE);
1162 	if (ret != CRYPTO_SUCCESS)
1163 		return (ret);
1164 
1165 	switch (mechanism->cm_type) {
1166 	case AES_CCM_MECH_INFO_TYPE:
1167 		length_needed = aes_ctx.ac_data_len;
1168 		break;
1169 	case AES_GCM_MECH_INFO_TYPE:
1170 		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1171 		break;
1172 	case AES_GMAC_MECH_INFO_TYPE:
1173 		if (plaintext->cd_length != 0) {
			/* go through out: so the key schedule is freed */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
1175 		length_needed = 0;
1176 		break;
1177 	default:
1178 		length_needed = ciphertext->cd_length;
1179 	}
1180 
1181 	/* return size of buffer needed to store output */
1182 	if (plaintext->cd_length < length_needed) {
1183 		plaintext->cd_length = length_needed;
1184 		ret = CRYPTO_BUFFER_TOO_SMALL;
1185 		goto out;
1186 	}
1187 
1188 	saved_offset = plaintext->cd_offset;
1189 	saved_length = plaintext->cd_length;
1190 
1191 	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1192 	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1193 		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1194 
1195 	/*
1196 	 * Do an update on the specified input data.
1197 	 */
1198 	switch (ciphertext->cd_format) {
1199 	case CRYPTO_DATA_RAW:
1200 		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1201 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1202 		break;
1203 	case CRYPTO_DATA_UIO:
1204 		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1205 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1206 		break;
1207 	case CRYPTO_DATA_MBLK:
1208 		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1209 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1210 		break;
1211 	default:
1212 		ret = CRYPTO_ARGUMENTS_BAD;
1213 	}
1214 
1215 	if (ret == CRYPTO_SUCCESS) {
1216 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1217 			ASSERT(aes_ctx.ac_processed_data_len
1218 			    == aes_ctx.ac_data_len);
1219 			ASSERT(aes_ctx.ac_processed_mac_len
1220 			    == aes_ctx.ac_mac_len);
1221 			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1222 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1223 			    aes_copy_block, aes_xor_block);
1224 			ASSERT(aes_ctx.ac_remainder_len == 0);
1225 			if ((ret == CRYPTO_SUCCESS) &&
1226 			    (ciphertext != plaintext)) {
1227 				plaintext->cd_length =
1228 				    plaintext->cd_offset - saved_offset;
1229 			} else {
1230 				plaintext->cd_length = saved_length;
1231 			}
1232 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1233 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1234 			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1235 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1236 			    aes_xor_block);
1237 			ASSERT(aes_ctx.ac_remainder_len == 0);
1238 			if ((ret == CRYPTO_SUCCESS) &&
1239 			    (ciphertext != plaintext)) {
1240 				plaintext->cd_length =
1241 				    plaintext->cd_offset - saved_offset;
1242 			} else {
1243 				plaintext->cd_length = saved_length;
1244 			}
1245 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1246 			ASSERT(aes_ctx.ac_remainder_len == 0);
1247 			if (ciphertext != plaintext)
1248 				plaintext->cd_length =
1249 				    plaintext->cd_offset - saved_offset;
1250 		} else {
1251 			if (aes_ctx.ac_remainder_len > 0) {
1252 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1253 				    plaintext, aes_encrypt_block);
1254 				if (ret == CRYPTO_DATA_LEN_RANGE)
1255 					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1256 				if (ret != CRYPTO_SUCCESS)
1257 					goto out;
1258 			}
1259 			if (ciphertext != plaintext)
1260 				plaintext->cd_length =
1261 				    plaintext->cd_offset - saved_offset;
1262 		}
1263 	} else {
1264 		plaintext->cd_length = saved_length;
1265 	}
1266 	plaintext->cd_offset = saved_offset;
1267 
1268 out:
1269 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1270 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1271 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1272 	}
1273 
1274 	if (aes_ctx.ac_flags & CCM_MODE) {
1275 		if (aes_ctx.ac_pt_buf != NULL) {
1276 			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1277 		}
1278 	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1279 		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1280 			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1281 			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1282 		}
1283 	}
1284 
1285 	return (ret);
1286 }
1287 
1288 /*
1289  * KCF software provider context template entry points.
1290  */
1291 /* ARGSUSED */
1292 static int
1293 aes_create_ctx_template(crypto_provider_handle_t provider,
1294     crypto_mechanism_t *mechanism, crypto_key_t *key,
1295     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1296 {
1297 
1298 /* EXPORT DELETE START */
1299 
1300 	void *keysched;
1301 	size_t size;
1302 	int rv;
1303 
1304 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1305 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1306 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1307 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1308 	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1309 	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1310 		return (CRYPTO_MECHANISM_INVALID);
1311 
1312 	if ((keysched = aes_alloc_keysched(&size,
1313 	    crypto_kmflag(req))) == NULL) {
1314 		return (CRYPTO_HOST_MEMORY);
1315 	}
1316 
1317 	/*
1318 	 * Initialize key schedule.  Key length information is stored
1319 	 * in the key.
1320 	 */
1321 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1322 		bzero(keysched, size);
1323 		kmem_free(keysched, size);
1324 		return (rv);
1325 	}
1326 
1327 	*tmpl = keysched;
1328 	*tmpl_size = size;
1329 
1330 /* EXPORT DELETE END */
1331 
1332 	return (CRYPTO_SUCCESS);
1333 }
1334 
1335 
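/*
 * KCF context free entry point.  Zeroes and frees the key schedule if
 * this provider allocated it, then frees the mode context.
 */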
1336 static int
1337 aes_free_context(crypto_ctx_t *ctx)
1338 {
1339 
1340 /* EXPORT DELETE START */
1341 
1342 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1343 
1344 	if (aes_ctx != NULL) {
1345 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1346 			ASSERT(aes_ctx->ac_keysched_len != 0);
1347 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1348 			kmem_free(aes_ctx->ac_keysched,
1349 			    aes_ctx->ac_keysched_len);
1350 		}
1351 		crypto_free_mode_ctx(aes_ctx);
1352 		ctx->cc_provider_private = NULL;
1353 	}
1354 
1355 /* EXPORT DELETE END */
1356 
1357 	return (CRYPTO_SUCCESS);
1358 }
1359 
1360 
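/*
 * Set up an AES context: take the key schedule from the supplied
 * template or allocate and initialize a fresh one, then perform the
 * mode-specific initialization from the mechanism parameter.
 */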
1361 static int
1362 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1363     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1364     boolean_t is_encrypt_init)
1365 {
1366 	int rv = CRYPTO_SUCCESS;
1367 
1368 /* EXPORT DELETE START */
1369 
1370 	void *keysched;
1371 	size_t size;
1372 
1373 	if (template == NULL) {
1374 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1375 			return (CRYPTO_HOST_MEMORY);
1376 		/*
1377 		 * Initialize key schedule.
1378 		 * Key length is stored in the key.
1379 		 */
1380 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1381 			kmem_free(keysched, size);
1382 			return (rv);
1383 		}
1384 
1385 		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1386 		aes_ctx->ac_keysched_len = size;
1387 	} else {
1388 		keysched = template;
1389 	}
1390 	aes_ctx->ac_keysched = keysched;
1391 
1392 	switch (mechanism->cm_type) {
1393 	case AES_CBC_MECH_INFO_TYPE:
1394 		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1395 		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1396 		break;
1397 	case AES_CTR_MECH_INFO_TYPE: {
1398 		CK_AES_CTR_PARAMS *pp;
1399 
1400 		if (mechanism->cm_param == NULL ||
1401 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1402 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1403 		}
1404 		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1405 		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1406 		    pp->cb, aes_copy_block);
1407 		break;
1408 	}
1409 	case AES_CCM_MECH_INFO_TYPE:
1410 		if (mechanism->cm_param == NULL ||
1411 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1412 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1413 		}
1414 		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1415 		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1416 		    aes_xor_block);
1417 		break;
1418 	case AES_GCM_MECH_INFO_TYPE:
1419 		if (mechanism->cm_param == NULL ||
1420 		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1421 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1422 		}
1423 		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1424 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1425 		    aes_xor_block);
1426 		break;
1427 	case AES_GMAC_MECH_INFO_TYPE:
1428 		if (mechanism->cm_param == NULL ||
1429 		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1430 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1431 		}
1432 		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1433 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1434 		    aes_xor_block);
1435 		break;
1436 	case AES_ECB_MECH_INFO_TYPE:
1437 		aes_ctx->ac_flags |= ECB_MODE;
1438 	}
1439 
1440 	if (rv != CRYPTO_SUCCESS) {
1441 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1442 			bzero(keysched, size);
1443 			kmem_free(keysched, size);
1444 		}
1445 	}
1446 
1447 /* EXPORT DELETE END */
1448 
1449 	return (rv);
1450 }
1451 
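/*
 * Convert a CK_AES_GMAC_PARAMS mechanism parameter into the equivalent
 * CK_AES_GCM_PARAMS, passing the supplied data (if any) as AAD, so that
 * GMAC can be layered on the GCM implementation.
 */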
1452 static int
1453 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1454     CK_AES_GCM_PARAMS *gcm_params)
1455 {
1456 	/* LINTED: pointer alignment */
1457 	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1458 
1459 	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1460 		return (CRYPTO_MECHANISM_INVALID);
1461 
1462 	if (mech->cm_param == NULL ||
	    mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1463 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1464 
1465 	if (params->pIv == NULL)
1466 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1467 
1468 	gcm_params->pIv = params->pIv;
1469 	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1470 	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1471 
1472 	if (data == NULL)
1473 		return (CRYPTO_SUCCESS);
1474 
1475 	if (data->cd_format != CRYPTO_DATA_RAW)
1476 		return (CRYPTO_ARGUMENTS_BAD);
1477 
1478 	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1479 	gcm_params->ulAADLen = data->cd_length;
1480 	return (CRYPTO_SUCCESS);
1481 }
1482 
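/*
 * KCF atomic MAC entry point for AES GMAC: a GCM encryption of
 * zero-length plaintext with the data supplied as AAD.
 */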
1483 static int
1484 aes_mac_atomic(crypto_provider_handle_t provider,
1485     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1486     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1487     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1488 {
1489 	CK_AES_GCM_PARAMS gcm_params;
1490 	crypto_mechanism_t gcm_mech;
1491 	int rv;
1492 
1493 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1494 	    != CRYPTO_SUCCESS)
1495 		return (rv);
1496 
1497 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1498 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1499 	gcm_mech.cm_param = (char *)&gcm_params;
1500 
1501 	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1502 	    key, &null_crypto_data, mac, template, req));
1503 }
1504 
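/*
 * KCF atomic MAC-verify entry point for AES GMAC: a GCM decryption
 * that checks the tag over the data supplied as AAD.
 */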
1505 static int
1506 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1507     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1508     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1509     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1510 {
1511 	CK_AES_GCM_PARAMS gcm_params;
1512 	crypto_mechanism_t gcm_mech;
1513 	int rv;
1514 
1515 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1516 	    != CRYPTO_SUCCESS)
1517 		return (rv);
1518 
1519 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1520 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1521 	gcm_mech.cm_param = (char *)&gcm_params;
1522 
1523 	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1524 	    key, mac, &null_crypto_data, template, req));
1525 }
1526