xref: /titanic_51/usr/src/uts/common/crypto/io/aes.c (revision bbaa8b60dd95d714741fc474adad3cf710ef4efd)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23  */
24 
25 /*
26  * AES provider for the Kernel Cryptographic Framework (KCF)
27  */
28 
29 #include <sys/types.h>
30 #include <sys/systm.h>
31 #include <sys/modctl.h>
32 #include <sys/cmn_err.h>
33 #include <sys/ddi.h>
34 #include <sys/crypto/common.h>
35 #include <sys/crypto/impl.h>
36 #include <sys/crypto/spi.h>
37 #include <sys/sysmacros.h>
38 #include <sys/strsun.h>
39 #include <modes/modes.h>
40 #define	_AES_IMPL
41 #include <aes/aes_impl.h>
42 
extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 * modlcrypto identifies this module as a crypto provider;
 * modlinkage is what _init()/_fini()/_info() hand to mod_install() et al.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,			/* crypto-class module operations */
	"AES Kernel SW Provider"	/* human-readable module description */
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};
58 
/*
 * The following definitions are to keep EXPORT_SRC happy.
 * Fallback values for source trees where aes_impl.h does not
 * provide the key-size limits (export-restricted builds).
 */
#ifndef AES_MIN_KEY_BYTES
#define	AES_MIN_KEY_BYTES		0
#endif

#ifndef AES_MAX_KEY_BYTES
#define	AES_MAX_KEY_BYTES		0
#endif
69 
/*
 * Mechanism info structure passed to KCF during registration.
 * One entry per supported mechanism; each lists the operation-type
 * flags the provider implements for it and the valid key-size range
 * (expressed in bytes, per CRYPTO_KEYSIZE_UNIT_IN_BYTES).
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC: additionally usable through the MAC/SIGN/VERIFY ops */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
108 
/*
 * Operations are in-place if the output buffer is NULL: point the
 * output argument at the input.  Wrapped in do/while (0) so the macro
 * expands to a single statement and is safe in un-braced if/else
 * bodies (the bare-if form was a dangling-else hazard).
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
113 
static void aes_provider_status(crypto_provider_handle_t, uint_t *);

/* KCF control entry points: only the status query is provided. */
static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};
119 
/* Forward declarations for the cipher entry points and shared helpers. */
static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

/* Full set of cipher entry points: multi-part and atomic, both ways. */
static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};
161 
static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

/*
 * MAC entry points (used for GMAC): only the atomic forms are
 * implemented; the multi-part init/update/final slots are NULL.
 */
static crypto_mac_ops_t aes_mac_ops = {
	NULL,
	NULL,
	NULL,
	NULL,
	aes_mac_atomic,
	aes_mac_verify_atomic
};
177 
static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

/* Context management: pre-expanded key-schedule templates and teardown. */
static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};
187 
/*
 * Master ops vector handed to KCF.  Only the control, cipher, MAC and
 * context op families are provided; every other family is NULL
 * (unsupported by this provider).
 */
static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops,
	NULL,
	NULL,
	NULL,
};
207 
/* Provider description registered with KCF in _init(). */
static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_4,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

/* Handle returned by KCF; NULL until registration in _init() succeeds. */
static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
/*
 * Empty RAW crypto_data_t.  NOTE(review): its consumer is not visible in
 * this chunk — presumably used by the GMAC atomic paths that need a
 * zero-length data argument; confirm against the rest of the file.
 */
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
221 
222 int
223 _init(void)
224 {
225 	int ret;
226 
227 	if ((ret = mod_install(&modlinkage)) != 0)
228 		return (ret);
229 
230 	/* Register with KCF.  If the registration fails, remove the module. */
231 	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
232 		(void) mod_remove(&modlinkage);
233 		return (EACCES);
234 	}
235 
236 	return (0);
237 }
238 
239 int
240 _fini(void)
241 {
242 	/* Unregister from KCF if module is registered */
243 	if (aes_prov_handle != NULL) {
244 		if (crypto_unregister_provider(aes_prov_handle))
245 			return (EBUSY);
246 
247 		aes_prov_handle = NULL;
248 	}
249 
250 	return (mod_remove(&modlinkage));
251 }
252 
/* _info(9E): standard module-information boilerplate. */
int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}
258 
259 
/*
 * Validate the mechanism parameter length for an AES mechanism and,
 * when `ctx' is non-NULL, allocate the matching mode-specific context
 * (ECB/CBC/CTR/CCM/GCM/GMAC) with the given kmem flag.
 *
 * Returns CRYPTO_SUCCESS, CRYPTO_MECHANISM_INVALID for an unknown
 * mechanism type, or CRYPTO_MECHANISM_PARAM_INVALID for a wrong-sized
 * parameter.
 *
 * NOTE(review): a required-but-absent parameter (cm_param == NULL) is
 * not rejected here — presumably aes_common_init_ctx() catches that
 * case; confirm before relying on this check alone.
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		/* ECB takes no parameter at all */
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		/* CBC parameter is the IV: one AES block */
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	/* param_len is only read when a parameter was actually supplied */
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		/*
		 * NOTE(review): alloc_fun() can return NULL (e.g. under
		 * KM_NOSLEEP); *ctx is stored unchecked while rv may still
		 * be CRYPTO_SUCCESS — callers appear to assume allocation
		 * succeeds; verify.
		 */
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
308 
309 /*
310  * Initialize key schedules for AES
311  */
312 static int
313 init_keysched(crypto_key_t *key, void *newbie)
314 {
315 	/*
316 	 * Only keys by value are supported by this module.
317 	 */
318 	switch (key->ck_format) {
319 	case CRYPTO_KEY_RAW:
320 		if (key->ck_length < AES_MINBITS ||
321 		    key->ck_length > AES_MAXBITS) {
322 			return (CRYPTO_KEY_SIZE_RANGE);
323 		}
324 
325 		/* key length must be either 128, 192, or 256 */
326 		if ((key->ck_length & 63) != 0)
327 			return (CRYPTO_KEY_SIZE_RANGE);
328 		break;
329 	default:
330 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
331 	}
332 
333 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
334 	return (CRYPTO_SUCCESS);
335 }
336 
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* A software provider is unconditionally ready. */
	*status = CRYPTO_PROVIDER_READY;
}
346 
347 static int
348 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
349     crypto_key_t *key, crypto_spi_ctx_template_t template,
350     crypto_req_handle_t req) {
351 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
352 }
353 
354 static int
355 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
356     crypto_key_t *key, crypto_spi_ctx_template_t template,
357     crypto_req_handle_t req) {
358 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
359 }
360 
361 
362 
363 /*
364  * KCF software provider encrypt entry points.
365  */
366 static int
367 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
368     crypto_key_t *key, crypto_spi_ctx_template_t template,
369     crypto_req_handle_t req, boolean_t is_encrypt_init)
370 {
371 	aes_ctx_t *aes_ctx;
372 	int rv;
373 	int kmflag;
374 
375 	/*
376 	 * Only keys by value are supported by this module.
377 	 */
378 	if (key->ck_format != CRYPTO_KEY_RAW) {
379 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
380 	}
381 
382 	kmflag = crypto_kmflag(req);
383 	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
384 	    != CRYPTO_SUCCESS)
385 		return (rv);
386 
387 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
388 	    is_encrypt_init);
389 	if (rv != CRYPTO_SUCCESS) {
390 		crypto_free_mode_ctx(aes_ctx);
391 		return (rv);
392 	}
393 
394 	ctx->cc_provider_private = aes_ctx;
395 
396 	return (CRYPTO_SUCCESS);
397 }
398 
399 static void
400 aes_copy_block64(uint8_t *in, uint64_t *out)
401 {
402 	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
403 		/* LINTED: pointer alignment */
404 		out[0] = *(uint64_t *)&in[0];
405 		/* LINTED: pointer alignment */
406 		out[1] = *(uint64_t *)&in[8];
407 	} else {
408 		uint8_t *iv8 = (uint8_t *)&out[0];
409 
410 		AES_COPY_BLOCK(in, iv8);
411 	}
412 }
413 
414 
/*
 * Single-part encrypt entry point.  Runs one update pass over the
 * plaintext and, for the authenticated modes (CCM/GCM/GMAC), invokes
 * the mode's final routine to append the MAC/tag.  The context is
 * destroyed before returning, except on the early size-check returns,
 * which deliberately leave it intact so the caller can retry with a
 * bigger buffer.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	/* in-place operation when no separate output buffer was given */
	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		/* output carries the CCM MAC after the data */
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		/* output carries the GCM tag after the data */
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		/* GMAC authenticates AAD only; no plaintext is allowed */
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		/* report required size without destroying the context */
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 *
	 * NOTE(review): on failure the context is returned without being
	 * freed here — presumably KCF tears it down when the operation is
	 * aborted; confirm against the framework's error handling.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			/* report total bytes produced (data + MAC) */
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			/* report total bytes produced (data + tag) */
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	/* single-part: no partial block may remain buffered */
	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}
528 
529 
/*
 * Single-part decrypt entry point.  Runs one update pass over the
 * ciphertext and, for CCM/GCM/GMAC, verifies the MAC/tag via the
 * mode's final routine.  The context is destroyed on all paths except
 * the early size-check returns, which leave it intact so the caller
 * can retry with a bigger buffer.
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	/* in-place operation when no separate output buffer was given */
	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		/*
		 * NOTE(review): size_t arithmetic — this underflows if
		 * cd_length < ac_tag_len; presumably earlier validation
		 * guarantees the ciphertext is at least tag-sized; confirm.
		 */
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/* all data and the full MAC must have been consumed */
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* MAC mismatch or error: restore caller's length */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* tag mismatch or error: restore caller's length */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	/* single-part: no partial block may remain buffered */
	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}
642 
643 
/*
 * Multi-part encrypt: process as many whole AES blocks as are available
 * (previously buffered remainder plus new input); any partial trailing
 * block is buffered in the context for the next update/final call.
 * For CTR mode — a stream cipher — the remainder is flushed through
 * ctr_mode_final() so every input byte is emitted immediately.
 */
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/* in-place operation when no separate output buffer was given */
	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);	/* round down to whole blocks */

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 * The crypto_update_* helpers differ only in how they walk the
	 * input container (iovec, uio, or mblk chain).
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		/* on failure, restore the caller's view of the buffer */
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}
718 
719 
/*
 * Multi-part decrypt: mirror image of aes_encrypt_update().  For the
 * authenticated modes (CCM/GCM/GMAC) no plaintext is released during
 * update calls — everything is buffered until final, when the MAC/tag
 * can be verified — so the output-size check is skipped for them.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/* in-place operation when no separate output buffer was given */
	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);	/* whole blocks only */

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM/GMAC buffer internally; tell them which kmem flag to use */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 * The crypto_update_* helpers differ only in how they walk the
	 * input container (iovec, uio, or mblk chain).
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		/* map the generic length error to the decrypt-side code */
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		/* on failure, restore the caller's view of the buffer */
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}
805 
/*
 * Finish a multi-part encrypt.  CTR flushes any buffered remainder;
 * CCM/GCM/GMAC emit the MAC/tag; plain block modes require that no
 * partial block is left over.  Frees the context on success (any
 * failure path returns with the context still allocated).
 */
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		/* stream cipher: flush whatever partial block remains */
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		/* emits remaining data plus the CCM MAC */
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		/* emits remaining data plus the GCM/GMAC tag */
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
863 
864 /* ARGSUSED */
865 static int
866 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
867     crypto_req_handle_t req)
868 {
869 	aes_ctx_t *aes_ctx;
870 	int ret;
871 	off_t saved_offset;
872 	size_t saved_length;
873 
874 	ASSERT(ctx->cc_provider_private != NULL);
875 	aes_ctx = ctx->cc_provider_private;
876 
877 	if (data->cd_format != CRYPTO_DATA_RAW &&
878 	    data->cd_format != CRYPTO_DATA_UIO &&
879 	    data->cd_format != CRYPTO_DATA_MBLK) {
880 		return (CRYPTO_ARGUMENTS_BAD);
881 	}
882 
883 	/*
884 	 * There must be no unprocessed ciphertext.
885 	 * This happens if the length of the last ciphertext is
886 	 * not a multiple of the AES block length.
887 	 */
888 	if (aes_ctx->ac_remainder_len > 0) {
889 		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
890 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
891 		else {
892 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
893 			    aes_encrypt_block);
894 			if (ret == CRYPTO_DATA_LEN_RANGE)
895 				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
896 			if (ret != CRYPTO_SUCCESS)
897 				return (ret);
898 		}
899 	}
900 
901 	if (aes_ctx->ac_flags & CCM_MODE) {
902 		/*
903 		 * This is where all the plaintext is returned, make sure
904 		 * the plaintext buffer is big enough
905 		 */
906 		size_t pt_len = aes_ctx->ac_data_len;
907 		if (data->cd_length < pt_len) {
908 			data->cd_length = pt_len;
909 			return (CRYPTO_BUFFER_TOO_SMALL);
910 		}
911 
912 		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
913 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
914 		saved_offset = data->cd_offset;
915 		saved_length = data->cd_length;
916 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
917 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
918 		    aes_xor_block);
919 		if (ret == CRYPTO_SUCCESS) {
920 			data->cd_length = data->cd_offset - saved_offset;
921 		} else {
922 			data->cd_length = saved_length;
923 		}
924 
925 		data->cd_offset = saved_offset;
926 		if (ret != CRYPTO_SUCCESS) {
927 			return (ret);
928 		}
929 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
930 		/*
931 		 * This is where all the plaintext is returned, make sure
932 		 * the plaintext buffer is big enough
933 		 */
934 		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
935 		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
936 
937 		if (data->cd_length < pt_len) {
938 			data->cd_length = pt_len;
939 			return (CRYPTO_BUFFER_TOO_SMALL);
940 		}
941 
942 		saved_offset = data->cd_offset;
943 		saved_length = data->cd_length;
944 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
945 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
946 		if (ret == CRYPTO_SUCCESS) {
947 			data->cd_length = data->cd_offset - saved_offset;
948 		} else {
949 			data->cd_length = saved_length;
950 		}
951 
952 		data->cd_offset = saved_offset;
953 		if (ret != CRYPTO_SUCCESS) {
954 			return (ret);
955 		}
956 	}
957 
958 
959 	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
960 		data->cd_length = 0;
961 	}
962 
963 	(void) aes_free_context(ctx);
964 
965 	return (CRYPTO_SUCCESS);
966 }
967 
968 /* ARGSUSED */
969 static int
970 aes_encrypt_atomic(crypto_provider_handle_t provider,
971     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
972     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
973     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
974 {
975 	aes_ctx_t aes_ctx;	/* on the stack */
976 	off_t saved_offset;
977 	size_t saved_length;
978 	size_t length_needed;
979 	int ret;
980 
981 	AES_ARG_INPLACE(plaintext, ciphertext);
982 
983 	/*
984 	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
985 	 * be a multiple of AES block size.
986 	 */
987 	switch (mechanism->cm_type) {
988 	case AES_CTR_MECH_INFO_TYPE:
989 	case AES_CCM_MECH_INFO_TYPE:
990 	case AES_GCM_MECH_INFO_TYPE:
991 	case AES_GMAC_MECH_INFO_TYPE:
992 		break;
993 	default:
994 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
995 			return (CRYPTO_DATA_LEN_RANGE);
996 	}
997 
998 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
999 		return (ret);
1000 
1001 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1002 
1003 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1004 	    crypto_kmflag(req), B_TRUE);
1005 	if (ret != CRYPTO_SUCCESS)
1006 		return (ret);
1007 
1008 	switch (mechanism->cm_type) {
1009 	case AES_CCM_MECH_INFO_TYPE:
1010 		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1011 		break;
1012 	case AES_GMAC_MECH_INFO_TYPE:
1013 		if (plaintext->cd_length != 0)
1014 			return (CRYPTO_ARGUMENTS_BAD);
1015 		/* FALLTHRU */
1016 	case AES_GCM_MECH_INFO_TYPE:
1017 		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1018 		break;
1019 	default:
1020 		length_needed = plaintext->cd_length;
1021 	}
1022 
1023 	/* return size of buffer needed to store output */
1024 	if (ciphertext->cd_length < length_needed) {
1025 		ciphertext->cd_length = length_needed;
1026 		ret = CRYPTO_BUFFER_TOO_SMALL;
1027 		goto out;
1028 	}
1029 
1030 	saved_offset = ciphertext->cd_offset;
1031 	saved_length = ciphertext->cd_length;
1032 
1033 	/*
1034 	 * Do an update on the specified input data.
1035 	 */
1036 	switch (plaintext->cd_format) {
1037 	case CRYPTO_DATA_RAW:
1038 		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1039 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1040 		break;
1041 	case CRYPTO_DATA_UIO:
1042 		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1043 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1044 		break;
1045 	case CRYPTO_DATA_MBLK:
1046 		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1047 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1048 		break;
1049 	default:
1050 		ret = CRYPTO_ARGUMENTS_BAD;
1051 	}
1052 
1053 	if (ret == CRYPTO_SUCCESS) {
1054 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1055 			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1056 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1057 			    aes_xor_block);
1058 			if (ret != CRYPTO_SUCCESS)
1059 				goto out;
1060 			ASSERT(aes_ctx.ac_remainder_len == 0);
1061 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1062 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1063 			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1064 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1065 			    aes_copy_block, aes_xor_block);
1066 			if (ret != CRYPTO_SUCCESS)
1067 				goto out;
1068 			ASSERT(aes_ctx.ac_remainder_len == 0);
1069 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1070 			if (aes_ctx.ac_remainder_len > 0) {
1071 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1072 				    ciphertext, aes_encrypt_block);
1073 				if (ret != CRYPTO_SUCCESS)
1074 					goto out;
1075 			}
1076 		} else {
1077 			ASSERT(aes_ctx.ac_remainder_len == 0);
1078 		}
1079 
1080 		if (plaintext != ciphertext) {
1081 			ciphertext->cd_length =
1082 			    ciphertext->cd_offset - saved_offset;
1083 		}
1084 	} else {
1085 		ciphertext->cd_length = saved_length;
1086 	}
1087 	ciphertext->cd_offset = saved_offset;
1088 
1089 out:
1090 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1091 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1092 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1093 	}
1094 
1095 	return (ret);
1096 }
1097 
1098 /* ARGSUSED */
1099 static int
1100 aes_decrypt_atomic(crypto_provider_handle_t provider,
1101     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1102     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1103     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1104 {
1105 	aes_ctx_t aes_ctx;	/* on the stack */
1106 	off_t saved_offset;
1107 	size_t saved_length;
1108 	size_t length_needed;
1109 	int ret;
1110 
1111 	AES_ARG_INPLACE(ciphertext, plaintext);
1112 
1113 	/*
1114 	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1115 	 * be a multiple of AES block size.
1116 	 */
1117 	switch (mechanism->cm_type) {
1118 	case AES_CTR_MECH_INFO_TYPE:
1119 	case AES_CCM_MECH_INFO_TYPE:
1120 	case AES_GCM_MECH_INFO_TYPE:
1121 	case AES_GMAC_MECH_INFO_TYPE:
1122 		break;
1123 	default:
1124 		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1125 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1126 	}
1127 
1128 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1129 		return (ret);
1130 
1131 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1132 
1133 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1134 	    crypto_kmflag(req), B_FALSE);
1135 	if (ret != CRYPTO_SUCCESS)
1136 		return (ret);
1137 
1138 	switch (mechanism->cm_type) {
1139 	case AES_CCM_MECH_INFO_TYPE:
1140 		length_needed = aes_ctx.ac_data_len;
1141 		break;
1142 	case AES_GCM_MECH_INFO_TYPE:
1143 		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1144 		break;
1145 	case AES_GMAC_MECH_INFO_TYPE:
1146 		if (plaintext->cd_length != 0)
1147 			return (CRYPTO_ARGUMENTS_BAD);
1148 		length_needed = 0;
1149 		break;
1150 	default:
1151 		length_needed = ciphertext->cd_length;
1152 	}
1153 
1154 	/* return size of buffer needed to store output */
1155 	if (plaintext->cd_length < length_needed) {
1156 		plaintext->cd_length = length_needed;
1157 		ret = CRYPTO_BUFFER_TOO_SMALL;
1158 		goto out;
1159 	}
1160 
1161 	saved_offset = plaintext->cd_offset;
1162 	saved_length = plaintext->cd_length;
1163 
1164 	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1165 	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1166 		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1167 
1168 	/*
1169 	 * Do an update on the specified input data.
1170 	 */
1171 	switch (ciphertext->cd_format) {
1172 	case CRYPTO_DATA_RAW:
1173 		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1174 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1175 		break;
1176 	case CRYPTO_DATA_UIO:
1177 		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1178 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1179 		break;
1180 	case CRYPTO_DATA_MBLK:
1181 		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1182 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1183 		break;
1184 	default:
1185 		ret = CRYPTO_ARGUMENTS_BAD;
1186 	}
1187 
1188 	if (ret == CRYPTO_SUCCESS) {
1189 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1190 			ASSERT(aes_ctx.ac_processed_data_len
1191 			    == aes_ctx.ac_data_len);
1192 			ASSERT(aes_ctx.ac_processed_mac_len
1193 			    == aes_ctx.ac_mac_len);
1194 			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1195 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1196 			    aes_copy_block, aes_xor_block);
1197 			ASSERT(aes_ctx.ac_remainder_len == 0);
1198 			if ((ret == CRYPTO_SUCCESS) &&
1199 			    (ciphertext != plaintext)) {
1200 				plaintext->cd_length =
1201 				    plaintext->cd_offset - saved_offset;
1202 			} else {
1203 				plaintext->cd_length = saved_length;
1204 			}
1205 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1206 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1207 			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1208 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1209 			    aes_xor_block);
1210 			ASSERT(aes_ctx.ac_remainder_len == 0);
1211 			if ((ret == CRYPTO_SUCCESS) &&
1212 			    (ciphertext != plaintext)) {
1213 				plaintext->cd_length =
1214 				    plaintext->cd_offset - saved_offset;
1215 			} else {
1216 				plaintext->cd_length = saved_length;
1217 			}
1218 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1219 			ASSERT(aes_ctx.ac_remainder_len == 0);
1220 			if (ciphertext != plaintext)
1221 				plaintext->cd_length =
1222 				    plaintext->cd_offset - saved_offset;
1223 		} else {
1224 			if (aes_ctx.ac_remainder_len > 0) {
1225 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1226 				    plaintext, aes_encrypt_block);
1227 				if (ret == CRYPTO_DATA_LEN_RANGE)
1228 					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1229 				if (ret != CRYPTO_SUCCESS)
1230 					goto out;
1231 			}
1232 			if (ciphertext != plaintext)
1233 				plaintext->cd_length =
1234 				    plaintext->cd_offset - saved_offset;
1235 		}
1236 	} else {
1237 		plaintext->cd_length = saved_length;
1238 	}
1239 	plaintext->cd_offset = saved_offset;
1240 
1241 out:
1242 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1243 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1244 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1245 	}
1246 
1247 	if (aes_ctx.ac_flags & CCM_MODE) {
1248 		if (aes_ctx.ac_pt_buf != NULL) {
1249 			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1250 		}
1251 	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1252 		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1253 			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1254 			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1255 		}
1256 	}
1257 
1258 	return (ret);
1259 }
1260 
1261 /*
1262  * KCF software provider context template entry points.
1263  */
1264 /* ARGSUSED */
1265 static int
1266 aes_create_ctx_template(crypto_provider_handle_t provider,
1267     crypto_mechanism_t *mechanism, crypto_key_t *key,
1268     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1269 {
1270 	void *keysched;
1271 	size_t size;
1272 	int rv;
1273 
1274 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1275 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1276 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1277 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1278 	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1279 	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1280 		return (CRYPTO_MECHANISM_INVALID);
1281 
1282 	if ((keysched = aes_alloc_keysched(&size,
1283 	    crypto_kmflag(req))) == NULL) {
1284 		return (CRYPTO_HOST_MEMORY);
1285 	}
1286 
1287 	/*
1288 	 * Initialize key schedule.  Key length information is stored
1289 	 * in the key.
1290 	 */
1291 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1292 		bzero(keysched, size);
1293 		kmem_free(keysched, size);
1294 		return (rv);
1295 	}
1296 
1297 	*tmpl = keysched;
1298 	*tmpl_size = size;
1299 
1300 	return (CRYPTO_SUCCESS);
1301 }
1302 
1303 
1304 static int
1305 aes_free_context(crypto_ctx_t *ctx)
1306 {
1307 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1308 
1309 	if (aes_ctx != NULL) {
1310 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1311 			ASSERT(aes_ctx->ac_keysched_len != 0);
1312 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1313 			kmem_free(aes_ctx->ac_keysched,
1314 			    aes_ctx->ac_keysched_len);
1315 		}
1316 		crypto_free_mode_ctx(aes_ctx);
1317 		ctx->cc_provider_private = NULL;
1318 	}
1319 
1320 	return (CRYPTO_SUCCESS);
1321 }
1322 
1323 
1324 static int
1325 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1326     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1327     boolean_t is_encrypt_init)
1328 {
1329 	int rv = CRYPTO_SUCCESS;
1330 	void *keysched;
1331 	size_t size;
1332 
1333 	if (template == NULL) {
1334 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1335 			return (CRYPTO_HOST_MEMORY);
1336 		/*
1337 		 * Initialize key schedule.
1338 		 * Key length is stored in the key.
1339 		 */
1340 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1341 			kmem_free(keysched, size);
1342 			return (rv);
1343 		}
1344 
1345 		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1346 		aes_ctx->ac_keysched_len = size;
1347 	} else {
1348 		keysched = template;
1349 	}
1350 	aes_ctx->ac_keysched = keysched;
1351 
1352 	switch (mechanism->cm_type) {
1353 	case AES_CBC_MECH_INFO_TYPE:
1354 		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1355 		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1356 		break;
1357 	case AES_CTR_MECH_INFO_TYPE: {
1358 		CK_AES_CTR_PARAMS *pp;
1359 
1360 		if (mechanism->cm_param == NULL ||
1361 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1362 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1363 		}
1364 		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1365 		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1366 		    pp->cb, aes_copy_block);
1367 		break;
1368 	}
1369 	case AES_CCM_MECH_INFO_TYPE:
1370 		if (mechanism->cm_param == NULL ||
1371 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1372 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1373 		}
1374 		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1375 		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1376 		    aes_xor_block);
1377 		break;
1378 	case AES_GCM_MECH_INFO_TYPE:
1379 		if (mechanism->cm_param == NULL ||
1380 		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1381 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1382 		}
1383 		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1384 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1385 		    aes_xor_block);
1386 		break;
1387 	case AES_GMAC_MECH_INFO_TYPE:
1388 		if (mechanism->cm_param == NULL ||
1389 		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1390 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1391 		}
1392 		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1393 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1394 		    aes_xor_block);
1395 		break;
1396 	case AES_ECB_MECH_INFO_TYPE:
1397 		aes_ctx->ac_flags |= ECB_MODE;
1398 	}
1399 
1400 	if (rv != CRYPTO_SUCCESS) {
1401 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1402 			bzero(keysched, size);
1403 			kmem_free(keysched, size);
1404 		}
1405 	}
1406 
1407 	return (rv);
1408 }
1409 
1410 static int
1411 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1412     CK_AES_GCM_PARAMS *gcm_params)
1413 {
1414 	/* LINTED: pointer alignment */
1415 	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1416 
1417 	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1418 		return (CRYPTO_MECHANISM_INVALID);
1419 
1420 	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1421 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1422 
1423 	if (params->pIv == NULL)
1424 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1425 
1426 	gcm_params->pIv = params->pIv;
1427 	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1428 	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1429 
1430 	if (data == NULL)
1431 		return (CRYPTO_SUCCESS);
1432 
1433 	if (data->cd_format != CRYPTO_DATA_RAW)
1434 		return (CRYPTO_ARGUMENTS_BAD);
1435 
1436 	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1437 	gcm_params->ulAADLen = data->cd_length;
1438 	return (CRYPTO_SUCCESS);
1439 }
1440 
1441 static int
1442 aes_mac_atomic(crypto_provider_handle_t provider,
1443     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1444     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1445     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1446 {
1447 	CK_AES_GCM_PARAMS gcm_params;
1448 	crypto_mechanism_t gcm_mech;
1449 	int rv;
1450 
1451 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1452 	    != CRYPTO_SUCCESS)
1453 		return (rv);
1454 
1455 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1456 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1457 	gcm_mech.cm_param = (char *)&gcm_params;
1458 
1459 	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1460 	    key, &null_crypto_data, mac, template, req));
1461 }
1462 
1463 static int
1464 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1465     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1466     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1467     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1468 {
1469 	CK_AES_GCM_PARAMS gcm_params;
1470 	crypto_mechanism_t gcm_mech;
1471 	int rv;
1472 
1473 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1474 	    != CRYPTO_SUCCESS)
1475 		return (rv);
1476 
1477 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1478 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1479 	gcm_mech.cm_param = (char *)&gcm_params;
1480 
1481 	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1482 	    key, mac, &null_crypto_data, template, req));
1483 }
1484