xref: /titanic_41/usr/src/uts/common/crypto/io/aes.c (revision 56f9a274cc7ca7f2d6f19959b2db143d94a4e7e0)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 /*
27  * AES provider for the Kernel Cryptographic Framework (KCF)
28  */
29 
30 #include <sys/types.h>
31 #include <sys/systm.h>
32 #include <sys/modctl.h>
33 #include <sys/cmn_err.h>
34 #include <sys/ddi.h>
35 #include <sys/crypto/common.h>
36 #include <sys/crypto/impl.h>
37 #include <sys/crypto/spi.h>
38 #include <sys/sysmacros.h>
39 #include <sys/strsun.h>
40 #include <modes/modes.h>
41 #include <aes/aes_impl.h>
42 
43 extern struct mod_ops mod_cryptoops;
44 
45 /*
46  * Module linkage information for the kernel.
47  */
48 static struct modlcrypto modlcrypto = {
49 	&mod_cryptoops,
50 	"AES Kernel SW Provider"
51 };
52 
53 static struct modlinkage modlinkage = {
54 	MODREV_1,
55 	(void *)&modlcrypto,
56 	NULL
57 };
58 
59 /*
60  * CSPI information (entry points, provider info, etc.)
61  */
62 typedef enum aes_mech_type {
63 	AES_ECB_MECH_INFO_TYPE,		/* SUN_CKM_AES_ECB */
64 	AES_CBC_MECH_INFO_TYPE,		/* SUN_CKM_AES_CBC */
65 	AES_CBC_PAD_MECH_INFO_TYPE,	/* SUN_CKM_AES_CBC_PAD */
66 	AES_CTR_MECH_INFO_TYPE,		/* SUN_CKM_AES_CTR */
67 	AES_CCM_MECH_INFO_TYPE,		/* SUN_CKM_AES_CCM */
68 	AES_GCM_MECH_INFO_TYPE,		/* SUN_CKM_AES_GCM */
69 	AES_GMAC_MECH_INFO_TYPE		/* SUN_CKM_AES_GMAC */
70 } aes_mech_type_t;
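
/*
 * These provider-private mechanism numbers correspond, in order, to the
 * entries of aes_mech_info_tab below.  KCF hands them back to this
 * provider in crypto_mechanism_t.cm_type, which the entry points switch
 * on (see aes_check_mech_param() and aes_common_init_ctx()).
 */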
71 
72 /*
73  * The following definitions are to keep EXPORT_SRC happy.
74  */
75 #ifndef AES_MIN_KEY_BYTES
76 #define	AES_MIN_KEY_BYTES		0
77 #endif
78 
79 #ifndef AES_MAX_KEY_BYTES
80 #define	AES_MAX_KEY_BYTES		0
81 #endif
82 
83 /*
84  * Mechanism info structure passed to KCF during registration.
85  */
86 static crypto_mech_info_t aes_mech_info_tab[] = {
87 	/* AES_ECB */
88 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
89 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
90 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
91 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
92 	/* AES_CBC */
93 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
94 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
95 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
96 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
97 	/* AES_CTR */
98 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
99 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
100 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
101 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
102 	/* AES_CCM */
103 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
104 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
105 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
106 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
107 	/* AES_GCM */
108 	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
109 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
110 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
111 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
112 	/* AES_GMAC */
113 	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
114 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
115 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
116 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
117 	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
118 	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
119 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
120 };
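
/*
 * A kernel consumer reaches these mechanisms by name through KCF.  As a
 * rough illustrative sketch (see crypto_mech2id(9F) and crypto_encrypt(9F)
 * for the exact consumer interfaces), a single-part CBC encryption looks
 * approximately like:
 *
 *	crypto_mechanism_t mech;
 *
 *	mech.cm_type = crypto_mech2id(SUN_CKM_AES_CBC);
 *	mech.cm_param = (char *)iv;
 *	mech.cm_param_len = AES_BLOCK_LEN;
 *	(void) crypto_encrypt(&mech, &plaintext, &key, NULL, &ciphertext,
 *	    NULL);
 *
 * When KCF selects this software provider, such a request is serviced by
 * aes_encrypt_atomic(), or by the init/update/final entry points below
 * for multi-part operations.
 */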
121 
122 /* operations are in-place if the output buffer is NULL */
123 #define	AES_ARG_INPLACE(input, output)				\
124 	if ((output) == NULL)					\
125 		(output) = (input);
126 
127 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
128 
129 static crypto_control_ops_t aes_control_ops = {
130 	aes_provider_status
131 };
132 
133 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
134     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
135 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
136     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
137 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
138     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
139 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
140     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
141 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
142     crypto_req_handle_t);
143 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
144     crypto_req_handle_t);
145 
146 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
147     crypto_req_handle_t);
148 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
149     crypto_data_t *, crypto_req_handle_t);
150 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
151     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
152     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
153 
154 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
155     crypto_req_handle_t);
156 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
157     crypto_data_t *, crypto_req_handle_t);
158 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
159     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
160     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
161 
162 static crypto_cipher_ops_t aes_cipher_ops = {
163 	aes_encrypt_init,
164 	aes_encrypt,
165 	aes_encrypt_update,
166 	aes_encrypt_final,
167 	aes_encrypt_atomic,
168 	aes_decrypt_init,
169 	aes_decrypt,
170 	aes_decrypt_update,
171 	aes_decrypt_final,
172 	aes_decrypt_atomic
173 };
174 
175 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
176     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
177     crypto_spi_ctx_template_t, crypto_req_handle_t);
178 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
179     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
180     crypto_spi_ctx_template_t, crypto_req_handle_t);
181 
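/*
 * Only the single-part (atomic) MAC entry points are supplied; the
 * multi-part mac_init/mac/mac_update/mac_final slots are left NULL, so
 * GMAC through this provider is available only via atomic requests.
 */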
182 static crypto_mac_ops_t aes_mac_ops = {
183 	NULL,
184 	NULL,
185 	NULL,
186 	NULL,
187 	aes_mac_atomic,
188 	aes_mac_verify_atomic
189 };
190 
191 static int aes_create_ctx_template(crypto_provider_handle_t,
192     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
193     size_t *, crypto_req_handle_t);
194 static int aes_free_context(crypto_ctx_t *);
195 
196 static crypto_ctx_ops_t aes_ctx_ops = {
197 	aes_create_ctx_template,
198 	aes_free_context
199 };
200 
201 static crypto_ops_t aes_crypto_ops = {
202 	&aes_control_ops,
203 	NULL,
204 	&aes_cipher_ops,
205 	&aes_mac_ops,
206 	NULL,
207 	NULL,
208 	NULL,
209 	NULL,
210 	NULL,
211 	NULL,
212 	NULL,
213 	NULL,
214 	NULL,
215 	&aes_ctx_ops
216 };
217 
218 static crypto_provider_info_t aes_prov_info = {
219 	CRYPTO_SPI_VERSION_1,
220 	"AES Software Provider",
221 	CRYPTO_SW_PROVIDER,
222 	{&modlinkage},
223 	NULL,
224 	&aes_crypto_ops,
225 	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
226 	aes_mech_info_tab
227 };
228 
229 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
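
/*
 * Zero-length crypto_data_t passed as the plaintext/ciphertext argument
 * when GMAC requests are remapped onto GCM by aes_mac_atomic() and
 * aes_mac_verify_atomic() below: GMAC authenticates data but produces
 * and consumes no ciphertext.
 */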
230 static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
231 
232 int
233 _init(void)
234 {
235 	int ret;
236 
237 	/*
238 	 * Register with KCF. If the registration fails, return error.
239 	 */
240 	if ((ret = crypto_register_provider(&aes_prov_info,
241 	    &aes_prov_handle)) != CRYPTO_SUCCESS) {
		cmn_err(CE_WARN, "%s _init: crypto_register_provider() "
		    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
244 		return (EACCES);
245 	}
246 
247 	if ((ret = mod_install(&modlinkage)) != 0) {
248 		int rv;
249 
250 		ASSERT(aes_prov_handle != NULL);
251 		/* We should not return if the unregister returns busy. */
252 		while ((rv = crypto_unregister_provider(aes_prov_handle))
253 		    == CRYPTO_BUSY) {
254 			cmn_err(CE_WARN,
255 			    "%s _init: crypto_unregister_provider() "
256 			    "failed (0x%x). Retrying.",
257 			    CRYPTO_PROVIDER_NAME, rv);
258 			/* wait 10 seconds and try again. */
259 			delay(10 * drv_usectohz(1000000));
260 		}
261 	}
262 
263 	return (ret);
264 }
265 
266 int
267 _fini(void)
268 {
269 	int ret;
270 
271 	/*
272 	 * Unregister from KCF if previous registration succeeded.
273 	 */
274 	if (aes_prov_handle != NULL) {
275 		if ((ret = crypto_unregister_provider(aes_prov_handle)) !=
276 		    CRYPTO_SUCCESS) {
277 			cmn_err(CE_WARN,
278 			    "%s _fini: crypto_unregister_provider() "
279 			    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
280 			return (EBUSY);
281 		}
282 		aes_prov_handle = NULL;
283 	}
284 
285 	return (mod_remove(&modlinkage));
286 }
287 
288 int
289 _info(struct modinfo *modinfop)
290 {
291 	return (mod_info(&modlinkage, modinfop));
292 }
293 
294 
295 static int
296 aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
297 {
298 	void *p = NULL;
299 	boolean_t param_required = B_TRUE;
300 	size_t param_len;
301 	void *(*alloc_fun)(int);
302 	int rv = CRYPTO_SUCCESS;
303 
304 	switch (mechanism->cm_type) {
305 	case AES_ECB_MECH_INFO_TYPE:
306 		param_required = B_FALSE;
307 		alloc_fun = ecb_alloc_ctx;
308 		break;
309 	case AES_CBC_MECH_INFO_TYPE:
310 		param_len = AES_BLOCK_LEN;
311 		alloc_fun = cbc_alloc_ctx;
312 		break;
313 	case AES_CTR_MECH_INFO_TYPE:
314 		param_len = sizeof (CK_AES_CTR_PARAMS);
315 		alloc_fun = ctr_alloc_ctx;
316 		break;
317 	case AES_CCM_MECH_INFO_TYPE:
318 		param_len = sizeof (CK_AES_CCM_PARAMS);
319 		alloc_fun = ccm_alloc_ctx;
320 		break;
321 	case AES_GCM_MECH_INFO_TYPE:
322 		param_len = sizeof (CK_AES_GCM_PARAMS);
323 		alloc_fun = gcm_alloc_ctx;
324 		break;
325 	case AES_GMAC_MECH_INFO_TYPE:
326 		param_len = sizeof (CK_AES_GMAC_PARAMS);
327 		alloc_fun = gmac_alloc_ctx;
328 		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
332 	if (param_required && mechanism->cm_param != NULL &&
333 	    mechanism->cm_param_len != param_len) {
334 		rv = CRYPTO_MECHANISM_PARAM_INVALID;
335 	}
336 	if (ctx != NULL) {
337 		p = (alloc_fun)(kmflag);
338 		*ctx = p;
339 	}
340 	return (rv);
341 }
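
/*
 * The context-initializing entry points (via aes_common_init()) call
 * aes_check_mech_param() with a non-NULL ctx so the mode-specific
 * context is allocated here; the atomic entry points pass a NULL ctx
 * and instead build an aes_ctx_t on the stack.
 */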
342 
343 /* EXPORT DELETE START */
344 
345 /*
346  * Initialize key schedules for AES
347  */
348 static int
349 init_keysched(crypto_key_t *key, void *newbie)
350 {
351 	/*
352 	 * Only keys by value are supported by this module.
353 	 */
354 	switch (key->ck_format) {
355 	case CRYPTO_KEY_RAW:
356 		if (key->ck_length < AES_MINBITS ||
357 		    key->ck_length > AES_MAXBITS) {
358 			return (CRYPTO_KEY_SIZE_RANGE);
359 		}
360 
361 		/* key length must be either 128, 192, or 256 */
362 		if ((key->ck_length & 63) != 0)
363 			return (CRYPTO_KEY_SIZE_RANGE);
364 		break;
365 	default:
366 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
367 	}
368 
369 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
370 	return (CRYPTO_SUCCESS);
371 }
372 
373 /* EXPORT DELETE END */
374 
375 /*
376  * KCF software provider control entry points.
377  */
378 /* ARGSUSED */
379 static void
380 aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
381 {
382 	*status = CRYPTO_PROVIDER_READY;
383 }
384 
385 static int
386 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
387     crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
389 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
390 }
391 
392 static int
393 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
394     crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
396 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
397 }
398 
399 
400 
401 /*
402  * KCF software provider encrypt entry points.
403  */
404 static int
405 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
406     crypto_key_t *key, crypto_spi_ctx_template_t template,
407     crypto_req_handle_t req, boolean_t is_encrypt_init)
408 {
409 
410 /* EXPORT DELETE START */
411 
412 	aes_ctx_t *aes_ctx;
413 	int rv;
414 	int kmflag;
415 
416 	/*
417 	 * Only keys by value are supported by this module.
418 	 */
419 	if (key->ck_format != CRYPTO_KEY_RAW) {
420 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
421 	}
422 
423 	kmflag = crypto_kmflag(req);
424 	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
425 	    != CRYPTO_SUCCESS)
426 		return (rv);
427 
428 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
429 	    is_encrypt_init);
430 	if (rv != CRYPTO_SUCCESS) {
431 		crypto_free_mode_ctx(aes_ctx);
432 		return (rv);
433 	}
434 
435 	ctx->cc_provider_private = aes_ctx;
436 
437 /* EXPORT DELETE END */
438 
439 	return (CRYPTO_SUCCESS);
440 }
441 
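/*
 * Copy one 16-byte AES block into two 64-bit words.  When the source is
 * 8-byte aligned the copy is done with two 64-bit loads; otherwise the
 * byte-wise AES_COPY_BLOCK() macro is used.  This is the block-copy
 * callback passed to crypto_update_iov()/_uio()/_mp() below.
 */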
442 static void
443 aes_copy_block64(uint8_t *in, uint64_t *out)
444 {
445 	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
446 		/* LINTED: pointer alignment */
447 		out[0] = *(uint64_t *)&in[0];
448 		/* LINTED: pointer alignment */
449 		out[1] = *(uint64_t *)&in[8];
450 	} else {
451 		uint8_t *iv8 = (uint8_t *)&out[0];
452 
453 		AES_COPY_BLOCK(in, iv8);
454 	}
455 }
456 
457 
458 static int
459 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
460     crypto_data_t *ciphertext, crypto_req_handle_t req)
461 {
462 	int ret = CRYPTO_FAILED;
463 
464 /* EXPORT DELETE START */
465 
466 	aes_ctx_t *aes_ctx;
467 	size_t saved_length, saved_offset, length_needed;
468 
469 	ASSERT(ctx->cc_provider_private != NULL);
470 	aes_ctx = ctx->cc_provider_private;
471 
472 	/*
473 	 * For block ciphers, plaintext must be a multiple of AES block size.
474 	 * This test is only valid for ciphers whose blocksize is a power of 2.
475 	 */
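	/*
	 * (For a power-of-two block size, (len & (AES_BLOCK_LEN - 1)) != 0
	 * is equivalent to len % AES_BLOCK_LEN != 0.)
	 */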
476 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
477 	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
478 		return (CRYPTO_DATA_LEN_RANGE);
479 
480 	AES_ARG_INPLACE(plaintext, ciphertext);
481 
482 	/*
	 * Compute the output length needed for the following modes.  If the
	 * output buffer is too small, return the required length to the
	 * caller without destroying the context.
485 	 */
486 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
487 	case CCM_MODE:
488 		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
489 		break;
490 	case GCM_MODE:
491 		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
492 		break;
493 	case GMAC_MODE:
494 		if (plaintext->cd_length != 0)
495 			return (CRYPTO_ARGUMENTS_BAD);
496 
497 		length_needed = aes_ctx->ac_tag_len;
498 		break;
499 	default:
500 		length_needed = plaintext->cd_length;
501 	}
502 
503 	if (ciphertext->cd_length < length_needed) {
504 		ciphertext->cd_length = length_needed;
505 		return (CRYPTO_BUFFER_TOO_SMALL);
506 	}
507 
508 	saved_length = ciphertext->cd_length;
509 	saved_offset = ciphertext->cd_offset;
510 
511 	/*
512 	 * Do an update on the specified input data.
513 	 */
514 	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
515 	if (ret != CRYPTO_SUCCESS) {
516 		return (ret);
517 	}
518 
519 	/*
520 	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
521 	 * left-over unprocessed data, and compute the MAC
522 	 */
523 	if (aes_ctx->ac_flags & CCM_MODE) {
524 		/*
		 * ccm_encrypt_final() computes the MAC and appends it to the
		 * ciphertext written above, so adjust the offset and the
		 * remaining length before calling it.
528 		 */
529 
530 		/* order of following 2 lines MUST not be reversed */
531 		ciphertext->cd_offset = ciphertext->cd_length;
532 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
533 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
534 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
535 		if (ret != CRYPTO_SUCCESS) {
536 			return (ret);
537 		}
538 
539 		if (plaintext != ciphertext) {
540 			ciphertext->cd_length =
541 			    ciphertext->cd_offset - saved_offset;
542 		}
543 		ciphertext->cd_offset = saved_offset;
544 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
545 		/*
		 * gcm_encrypt_final() computes the tag and appends it to the
		 * ciphertext written above, so adjust the offset and the
		 * remaining length before calling it.
549 		 */
550 
551 		/* order of following 2 lines MUST not be reversed */
552 		ciphertext->cd_offset = ciphertext->cd_length;
553 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
554 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
555 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
556 		    aes_xor_block);
557 		if (ret != CRYPTO_SUCCESS) {
558 			return (ret);
559 		}
560 
561 		if (plaintext != ciphertext) {
562 			ciphertext->cd_length =
563 			    ciphertext->cd_offset - saved_offset;
564 		}
565 		ciphertext->cd_offset = saved_offset;
566 	}
567 
568 	ASSERT(aes_ctx->ac_remainder_len == 0);
569 	(void) aes_free_context(ctx);
570 
571 /* EXPORT DELETE END */
572 
573 	return (ret);
574 }
575 
576 
577 static int
578 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
579     crypto_data_t *plaintext, crypto_req_handle_t req)
580 {
581 	int ret = CRYPTO_FAILED;
582 
583 /* EXPORT DELETE START */
584 
585 	aes_ctx_t *aes_ctx;
586 	off_t saved_offset;
587 	size_t saved_length, length_needed;
588 
589 	ASSERT(ctx->cc_provider_private != NULL);
590 	aes_ctx = ctx->cc_provider_private;
591 
592 	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
594 	 * This test is only valid for ciphers whose blocksize is a power of 2.
595 	 */
596 	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
597 	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
598 		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
599 	}
600 
601 	AES_ARG_INPLACE(ciphertext, plaintext);
602 
603 	/*
604 	 * Return length needed to store the output.
605 	 * Do not destroy context when plaintext buffer is too small.
606 	 *
607 	 * CCM:  plaintext is MAC len smaller than cipher text
608 	 * GCM:  plaintext is TAG len smaller than cipher text
609 	 * GMAC: plaintext length must be zero
610 	 */
611 	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
612 	case CCM_MODE:
613 		length_needed = aes_ctx->ac_processed_data_len;
614 		break;
615 	case GCM_MODE:
616 		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
617 		break;
618 	case GMAC_MODE:
619 		if (plaintext->cd_length != 0)
620 			return (CRYPTO_ARGUMENTS_BAD);
621 
622 		length_needed = 0;
623 		break;
624 	default:
625 		length_needed = ciphertext->cd_length;
626 	}
627 
628 	if (plaintext->cd_length < length_needed) {
629 		plaintext->cd_length = length_needed;
630 		return (CRYPTO_BUFFER_TOO_SMALL);
631 	}
632 
633 	saved_offset = plaintext->cd_offset;
634 	saved_length = plaintext->cd_length;
635 
636 	/*
637 	 * Do an update on the specified input data.
638 	 */
639 	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
640 	if (ret != CRYPTO_SUCCESS) {
641 		goto cleanup;
642 	}
643 
644 	if (aes_ctx->ac_flags & CCM_MODE) {
645 		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
646 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
647 
648 		/* order of following 2 lines MUST not be reversed */
649 		plaintext->cd_offset = plaintext->cd_length;
650 		plaintext->cd_length = saved_length - plaintext->cd_length;
651 
652 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
653 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
654 		    aes_xor_block);
655 		if (ret == CRYPTO_SUCCESS) {
656 			if (plaintext != ciphertext) {
657 				plaintext->cd_length =
658 				    plaintext->cd_offset - saved_offset;
659 			}
660 		} else {
661 			plaintext->cd_length = saved_length;
662 		}
663 
664 		plaintext->cd_offset = saved_offset;
665 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
666 		/* order of following 2 lines MUST not be reversed */
667 		plaintext->cd_offset = plaintext->cd_length;
668 		plaintext->cd_length = saved_length - plaintext->cd_length;
669 
670 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
671 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
672 		if (ret == CRYPTO_SUCCESS) {
673 			if (plaintext != ciphertext) {
674 				plaintext->cd_length =
675 				    plaintext->cd_offset - saved_offset;
676 			}
677 		} else {
678 			plaintext->cd_length = saved_length;
679 		}
680 
681 		plaintext->cd_offset = saved_offset;
682 	}
683 
684 	ASSERT(aes_ctx->ac_remainder_len == 0);
685 
686 cleanup:
687 	(void) aes_free_context(ctx);
688 
689 /* EXPORT DELETE END */
690 
691 	return (ret);
692 }
693 
694 
695 /* ARGSUSED */
696 static int
697 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
698     crypto_data_t *ciphertext, crypto_req_handle_t req)
699 {
700 	off_t saved_offset;
701 	size_t saved_length, out_len;
702 	int ret = CRYPTO_SUCCESS;
703 	aes_ctx_t *aes_ctx;
704 
705 	ASSERT(ctx->cc_provider_private != NULL);
706 	aes_ctx = ctx->cc_provider_private;
707 
708 	AES_ARG_INPLACE(plaintext, ciphertext);
709 
710 	/* compute number of bytes that will hold the ciphertext */
711 	out_len = aes_ctx->ac_remainder_len;
712 	out_len += plaintext->cd_length;
713 	out_len &= ~(AES_BLOCK_LEN - 1);
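	/*
	 * out_len is now rounded down to a whole number of AES blocks; any
	 * leftover input bytes are carried in the context (tracked by
	 * ac_remainder_len) until a later update or the final call.
	 */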
714 
715 	/* return length needed to store the output */
716 	if (ciphertext->cd_length < out_len) {
717 		ciphertext->cd_length = out_len;
718 		return (CRYPTO_BUFFER_TOO_SMALL);
719 	}
720 
721 	saved_offset = ciphertext->cd_offset;
722 	saved_length = ciphertext->cd_length;
723 
724 	/*
725 	 * Do the AES update on the specified input data.
726 	 */
727 	switch (plaintext->cd_format) {
728 	case CRYPTO_DATA_RAW:
729 		ret = crypto_update_iov(ctx->cc_provider_private,
730 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
731 		    aes_copy_block64);
732 		break;
733 	case CRYPTO_DATA_UIO:
734 		ret = crypto_update_uio(ctx->cc_provider_private,
735 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
736 		    aes_copy_block64);
737 		break;
738 	case CRYPTO_DATA_MBLK:
739 		ret = crypto_update_mp(ctx->cc_provider_private,
740 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
741 		    aes_copy_block64);
742 		break;
743 	default:
744 		ret = CRYPTO_ARGUMENTS_BAD;
745 	}
746 
747 	/*
748 	 * Since AES counter mode is a stream cipher, we call
749 	 * ctr_mode_final() to pick up any remaining bytes.
750 	 * It is an internal function that does not destroy
751 	 * the context like *normal* final routines.
752 	 */
753 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
754 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
755 		    ciphertext, aes_encrypt_block);
756 	}
757 
758 	if (ret == CRYPTO_SUCCESS) {
759 		if (plaintext != ciphertext)
760 			ciphertext->cd_length =
761 			    ciphertext->cd_offset - saved_offset;
762 	} else {
763 		ciphertext->cd_length = saved_length;
764 	}
765 	ciphertext->cd_offset = saved_offset;
766 
767 	return (ret);
768 }
769 
770 
771 static int
772 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
773     crypto_data_t *plaintext, crypto_req_handle_t req)
774 {
775 	off_t saved_offset;
776 	size_t saved_length, out_len;
777 	int ret = CRYPTO_SUCCESS;
778 	aes_ctx_t *aes_ctx;
779 
780 	ASSERT(ctx->cc_provider_private != NULL);
781 	aes_ctx = ctx->cc_provider_private;
782 
783 	AES_ARG_INPLACE(ciphertext, plaintext);
784 
785 	/*
786 	 * Compute number of bytes that will hold the plaintext.
787 	 * This is not necessary for CCM, GCM, and GMAC since these
788 	 * mechanisms never return plaintext for update operations.
789 	 */
790 	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
791 		out_len = aes_ctx->ac_remainder_len;
792 		out_len += ciphertext->cd_length;
793 		out_len &= ~(AES_BLOCK_LEN - 1);
794 
795 		/* return length needed to store the output */
796 		if (plaintext->cd_length < out_len) {
797 			plaintext->cd_length = out_len;
798 			return (CRYPTO_BUFFER_TOO_SMALL);
799 		}
800 	}
801 
802 	saved_offset = plaintext->cd_offset;
803 	saved_length = plaintext->cd_length;
804 
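	/*
	 * GCM and GMAC decryption accumulate the incoming ciphertext
	 * internally (gcm_pt_buf) so the tag can be checked before any
	 * plaintext is returned (see aes_decrypt_final()); gcm_set_kmflag()
	 * tells the mode code which kmem flag to use for that buffer.
	 */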
805 	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
806 		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));
807 
808 	/*
809 	 * Do the AES update on the specified input data.
810 	 */
811 	switch (ciphertext->cd_format) {
812 	case CRYPTO_DATA_RAW:
813 		ret = crypto_update_iov(ctx->cc_provider_private,
814 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
815 		    aes_copy_block64);
816 		break;
817 	case CRYPTO_DATA_UIO:
818 		ret = crypto_update_uio(ctx->cc_provider_private,
819 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
820 		    aes_copy_block64);
821 		break;
822 	case CRYPTO_DATA_MBLK:
823 		ret = crypto_update_mp(ctx->cc_provider_private,
824 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
825 		    aes_copy_block64);
826 		break;
827 	default:
828 		ret = CRYPTO_ARGUMENTS_BAD;
829 	}
830 
831 	/*
832 	 * Since AES counter mode is a stream cipher, we call
833 	 * ctr_mode_final() to pick up any remaining bytes.
834 	 * It is an internal function that does not destroy
835 	 * the context like *normal* final routines.
836 	 */
837 	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
838 		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
839 		    aes_encrypt_block);
840 		if (ret == CRYPTO_DATA_LEN_RANGE)
841 			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
842 	}
843 
844 	if (ret == CRYPTO_SUCCESS) {
845 		if (ciphertext != plaintext)
846 			plaintext->cd_length =
847 			    plaintext->cd_offset - saved_offset;
848 	} else {
849 		plaintext->cd_length = saved_length;
850 	}
851 	plaintext->cd_offset = saved_offset;
852 
853 
854 	return (ret);
855 }
856 
857 /* ARGSUSED */
858 static int
859 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
860     crypto_req_handle_t req)
861 {
862 
863 /* EXPORT DELETE START */
864 
865 	aes_ctx_t *aes_ctx;
866 	int ret;
867 
868 	ASSERT(ctx->cc_provider_private != NULL);
869 	aes_ctx = ctx->cc_provider_private;
870 
871 	if (data->cd_format != CRYPTO_DATA_RAW &&
872 	    data->cd_format != CRYPTO_DATA_UIO &&
873 	    data->cd_format != CRYPTO_DATA_MBLK) {
874 		return (CRYPTO_ARGUMENTS_BAD);
875 	}
876 
877 	if (aes_ctx->ac_flags & CTR_MODE) {
878 		if (aes_ctx->ac_remainder_len > 0) {
879 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
880 			    aes_encrypt_block);
881 			if (ret != CRYPTO_SUCCESS)
882 				return (ret);
883 		}
884 	} else if (aes_ctx->ac_flags & CCM_MODE) {
885 		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
886 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
887 		if (ret != CRYPTO_SUCCESS) {
888 			return (ret);
889 		}
890 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
891 		size_t saved_offset = data->cd_offset;
892 
893 		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
894 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
895 		    aes_xor_block);
896 		if (ret != CRYPTO_SUCCESS) {
897 			return (ret);
898 		}
899 		data->cd_length = data->cd_offset - saved_offset;
900 		data->cd_offset = saved_offset;
901 	} else {
902 		/*
903 		 * There must be no unprocessed plaintext.
904 		 * This happens if the length of the last data is
905 		 * not a multiple of the AES block length.
906 		 */
907 		if (aes_ctx->ac_remainder_len > 0) {
908 			return (CRYPTO_DATA_LEN_RANGE);
909 		}
910 		data->cd_length = 0;
911 	}
912 
913 	(void) aes_free_context(ctx);
914 
915 /* EXPORT DELETE END */
916 
917 	return (CRYPTO_SUCCESS);
918 }
919 
920 /* ARGSUSED */
921 static int
922 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
923     crypto_req_handle_t req)
924 {
925 
926 /* EXPORT DELETE START */
927 
928 	aes_ctx_t *aes_ctx;
929 	int ret;
930 	off_t saved_offset;
931 	size_t saved_length;
932 
933 	ASSERT(ctx->cc_provider_private != NULL);
934 	aes_ctx = ctx->cc_provider_private;
935 
936 	if (data->cd_format != CRYPTO_DATA_RAW &&
937 	    data->cd_format != CRYPTO_DATA_UIO &&
938 	    data->cd_format != CRYPTO_DATA_MBLK) {
939 		return (CRYPTO_ARGUMENTS_BAD);
940 	}
941 
942 	/*
943 	 * There must be no unprocessed ciphertext.
944 	 * This happens if the length of the last ciphertext is
945 	 * not a multiple of the AES block length.
946 	 */
947 	if (aes_ctx->ac_remainder_len > 0) {
948 		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
949 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
950 		else {
951 			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
952 			    aes_encrypt_block);
953 			if (ret == CRYPTO_DATA_LEN_RANGE)
954 				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
955 			if (ret != CRYPTO_SUCCESS)
956 				return (ret);
957 		}
958 	}
959 
960 	if (aes_ctx->ac_flags & CCM_MODE) {
961 		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
964 		 */
965 		size_t pt_len = aes_ctx->ac_data_len;
966 		if (data->cd_length < pt_len) {
967 			data->cd_length = pt_len;
968 			return (CRYPTO_BUFFER_TOO_SMALL);
969 		}
970 
971 		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
972 		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
973 		saved_offset = data->cd_offset;
974 		saved_length = data->cd_length;
975 		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
976 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
977 		    aes_xor_block);
978 		if (ret == CRYPTO_SUCCESS) {
979 			data->cd_length = data->cd_offset - saved_offset;
980 		} else {
981 			data->cd_length = saved_length;
982 		}
983 
984 		data->cd_offset = saved_offset;
985 		if (ret != CRYPTO_SUCCESS) {
986 			return (ret);
987 		}
988 	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
989 		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
992 		 */
993 		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
994 		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
995 
996 		if (data->cd_length < pt_len) {
997 			data->cd_length = pt_len;
998 			return (CRYPTO_BUFFER_TOO_SMALL);
999 		}
1000 
1001 		saved_offset = data->cd_offset;
1002 		saved_length = data->cd_length;
1003 		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
1004 		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
1005 		if (ret == CRYPTO_SUCCESS) {
1006 			data->cd_length = data->cd_offset - saved_offset;
1007 		} else {
1008 			data->cd_length = saved_length;
1009 		}
1010 
1011 		data->cd_offset = saved_offset;
1012 		if (ret != CRYPTO_SUCCESS) {
1013 			return (ret);
1014 		}
1015 	}
1016 
1017 
1018 	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
1019 		data->cd_length = 0;
1020 	}
1021 
1022 	(void) aes_free_context(ctx);
1023 
1024 /* EXPORT DELETE END */
1025 
1026 	return (CRYPTO_SUCCESS);
1027 }
1028 
1029 /* ARGSUSED */
1030 static int
1031 aes_encrypt_atomic(crypto_provider_handle_t provider,
1032     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1033     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1034     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1035 {
1036 	aes_ctx_t aes_ctx;	/* on the stack */
1037 	off_t saved_offset;
1038 	size_t saved_length;
1039 	size_t length_needed;
1040 	int ret;
1041 
1042 	AES_ARG_INPLACE(plaintext, ciphertext);
1043 
1044 	/*
1045 	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
1046 	 * be a multiple of AES block size.
1047 	 */
1048 	switch (mechanism->cm_type) {
1049 	case AES_CTR_MECH_INFO_TYPE:
1050 	case AES_CCM_MECH_INFO_TYPE:
1051 	case AES_GCM_MECH_INFO_TYPE:
1052 	case AES_GMAC_MECH_INFO_TYPE:
1053 		break;
1054 	default:
1055 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1056 			return (CRYPTO_DATA_LEN_RANGE);
1057 	}
1058 
1059 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1060 		return (ret);
1061 
1062 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1063 
1064 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1065 	    crypto_kmflag(req), B_TRUE);
1066 	if (ret != CRYPTO_SUCCESS)
1067 		return (ret);
1068 
1069 	switch (mechanism->cm_type) {
1070 	case AES_CCM_MECH_INFO_TYPE:
1071 		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1072 		break;
1073 	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
1076 		/* FALLTHRU */
1077 	case AES_GCM_MECH_INFO_TYPE:
1078 		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1079 		break;
1080 	default:
1081 		length_needed = plaintext->cd_length;
1082 	}
1083 
1084 	/* return size of buffer needed to store output */
1085 	if (ciphertext->cd_length < length_needed) {
1086 		ciphertext->cd_length = length_needed;
1087 		ret = CRYPTO_BUFFER_TOO_SMALL;
1088 		goto out;
1089 	}
1090 
1091 	saved_offset = ciphertext->cd_offset;
1092 	saved_length = ciphertext->cd_length;
1093 
1094 	/*
1095 	 * Do an update on the specified input data.
1096 	 */
1097 	switch (plaintext->cd_format) {
1098 	case CRYPTO_DATA_RAW:
1099 		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1100 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1101 		break;
1102 	case CRYPTO_DATA_UIO:
1103 		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1104 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1105 		break;
1106 	case CRYPTO_DATA_MBLK:
1107 		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1108 		    aes_encrypt_contiguous_blocks, aes_copy_block64);
1109 		break;
1110 	default:
1111 		ret = CRYPTO_ARGUMENTS_BAD;
1112 	}
1113 
1114 	if (ret == CRYPTO_SUCCESS) {
1115 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1116 			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1117 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1118 			    aes_xor_block);
1119 			if (ret != CRYPTO_SUCCESS)
1120 				goto out;
1121 			ASSERT(aes_ctx.ac_remainder_len == 0);
1122 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1123 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1124 			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1125 			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1126 			    aes_copy_block, aes_xor_block);
1127 			if (ret != CRYPTO_SUCCESS)
1128 				goto out;
1129 			ASSERT(aes_ctx.ac_remainder_len == 0);
1130 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1131 			if (aes_ctx.ac_remainder_len > 0) {
1132 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1133 				    ciphertext, aes_encrypt_block);
1134 				if (ret != CRYPTO_SUCCESS)
1135 					goto out;
1136 			}
1137 		} else {
1138 			ASSERT(aes_ctx.ac_remainder_len == 0);
1139 		}
1140 
1141 		if (plaintext != ciphertext) {
1142 			ciphertext->cd_length =
1143 			    ciphertext->cd_offset - saved_offset;
1144 		}
1145 	} else {
1146 		ciphertext->cd_length = saved_length;
1147 	}
1148 	ciphertext->cd_offset = saved_offset;
1149 
1150 out:
1151 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1152 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1153 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1154 	}
1155 
1156 	return (ret);
1157 }
1158 
1159 /* ARGSUSED */
1160 static int
1161 aes_decrypt_atomic(crypto_provider_handle_t provider,
1162     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1163     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1164     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1165 {
1166 	aes_ctx_t aes_ctx;	/* on the stack */
1167 	off_t saved_offset;
1168 	size_t saved_length;
1169 	size_t length_needed;
1170 	int ret;
1171 
1172 	AES_ARG_INPLACE(ciphertext, plaintext);
1173 
1174 	/*
1175 	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1176 	 * be a multiple of AES block size.
1177 	 */
1178 	switch (mechanism->cm_type) {
1179 	case AES_CTR_MECH_INFO_TYPE:
1180 	case AES_CCM_MECH_INFO_TYPE:
1181 	case AES_GCM_MECH_INFO_TYPE:
1182 	case AES_GMAC_MECH_INFO_TYPE:
1183 		break;
1184 	default:
1185 		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1186 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1187 	}
1188 
1189 	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1190 		return (ret);
1191 
1192 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1193 
1194 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1195 	    crypto_kmflag(req), B_FALSE);
1196 	if (ret != CRYPTO_SUCCESS)
1197 		return (ret);
1198 
1199 	switch (mechanism->cm_type) {
1200 	case AES_CCM_MECH_INFO_TYPE:
1201 		length_needed = aes_ctx.ac_data_len;
1202 		break;
1203 	case AES_GCM_MECH_INFO_TYPE:
1204 		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1205 		break;
1206 	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
1209 		length_needed = 0;
1210 		break;
1211 	default:
1212 		length_needed = ciphertext->cd_length;
1213 	}
1214 
1215 	/* return size of buffer needed to store output */
1216 	if (plaintext->cd_length < length_needed) {
1217 		plaintext->cd_length = length_needed;
1218 		ret = CRYPTO_BUFFER_TOO_SMALL;
1219 		goto out;
1220 	}
1221 
1222 	saved_offset = plaintext->cd_offset;
1223 	saved_length = plaintext->cd_length;
1224 
1225 	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1226 	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1227 		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1228 
1229 	/*
1230 	 * Do an update on the specified input data.
1231 	 */
1232 	switch (ciphertext->cd_format) {
1233 	case CRYPTO_DATA_RAW:
1234 		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1235 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1236 		break;
1237 	case CRYPTO_DATA_UIO:
1238 		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1239 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1240 		break;
1241 	case CRYPTO_DATA_MBLK:
1242 		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1243 		    aes_decrypt_contiguous_blocks, aes_copy_block64);
1244 		break;
1245 	default:
1246 		ret = CRYPTO_ARGUMENTS_BAD;
1247 	}
1248 
1249 	if (ret == CRYPTO_SUCCESS) {
1250 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1251 			ASSERT(aes_ctx.ac_processed_data_len
1252 			    == aes_ctx.ac_data_len);
1253 			ASSERT(aes_ctx.ac_processed_mac_len
1254 			    == aes_ctx.ac_mac_len);
1255 			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1256 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1257 			    aes_copy_block, aes_xor_block);
1258 			ASSERT(aes_ctx.ac_remainder_len == 0);
1259 			if ((ret == CRYPTO_SUCCESS) &&
1260 			    (ciphertext != plaintext)) {
1261 				plaintext->cd_length =
1262 				    plaintext->cd_offset - saved_offset;
1263 			} else {
1264 				plaintext->cd_length = saved_length;
1265 			}
1266 		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1267 		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1268 			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1269 			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1270 			    aes_xor_block);
1271 			ASSERT(aes_ctx.ac_remainder_len == 0);
1272 			if ((ret == CRYPTO_SUCCESS) &&
1273 			    (ciphertext != plaintext)) {
1274 				plaintext->cd_length =
1275 				    plaintext->cd_offset - saved_offset;
1276 			} else {
1277 				plaintext->cd_length = saved_length;
1278 			}
1279 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1280 			ASSERT(aes_ctx.ac_remainder_len == 0);
1281 			if (ciphertext != plaintext)
1282 				plaintext->cd_length =
1283 				    plaintext->cd_offset - saved_offset;
1284 		} else {
1285 			if (aes_ctx.ac_remainder_len > 0) {
1286 				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1287 				    plaintext, aes_encrypt_block);
1288 				if (ret == CRYPTO_DATA_LEN_RANGE)
1289 					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1290 				if (ret != CRYPTO_SUCCESS)
1291 					goto out;
1292 			}
1293 			if (ciphertext != plaintext)
1294 				plaintext->cd_length =
1295 				    plaintext->cd_offset - saved_offset;
1296 		}
1297 	} else {
1298 		plaintext->cd_length = saved_length;
1299 	}
1300 	plaintext->cd_offset = saved_offset;
1301 
1302 out:
1303 	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1304 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1305 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1306 	}
1307 
1308 	if (aes_ctx.ac_flags & CCM_MODE) {
1309 		if (aes_ctx.ac_pt_buf != NULL) {
1310 			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1311 		}
1312 	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1313 		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1314 			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1315 			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1316 		}
1317 	}
1318 
1319 	return (ret);
1320 }
1321 
1322 /*
1323  * KCF software provider context template entry points.
1324  */
1325 /* ARGSUSED */
1326 static int
1327 aes_create_ctx_template(crypto_provider_handle_t provider,
1328     crypto_mechanism_t *mechanism, crypto_key_t *key,
1329     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1330 {
1331 
1332 /* EXPORT DELETE START */
1333 
1334 	void *keysched;
1335 	size_t size;
1336 	int rv;
1337 
1338 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1339 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1340 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1341 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1342 	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1343 	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1344 		return (CRYPTO_MECHANISM_INVALID);
1345 
1346 	if ((keysched = aes_alloc_keysched(&size,
1347 	    crypto_kmflag(req))) == NULL) {
1348 		return (CRYPTO_HOST_MEMORY);
1349 	}
1350 
1351 	/*
1352 	 * Initialize key schedule.  Key length information is stored
1353 	 * in the key.
1354 	 */
1355 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1356 		bzero(keysched, size);
1357 		kmem_free(keysched, size);
1358 		return (rv);
1359 	}
1360 
1361 	*tmpl = keysched;
1362 	*tmpl_size = size;
1363 
1364 /* EXPORT DELETE END */
1365 
1366 	return (CRYPTO_SUCCESS);
1367 }
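
/*
 * The context template returned above is simply the expanded AES key
 * schedule.  When a caller passes it back in, aes_common_init_ctx() uses
 * it directly (without setting PROVIDER_OWNS_KEY_SCHEDULE), so the
 * per-operation key expansion is skipped and the schedule is not freed
 * along with the context.
 */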
1368 
1369 
1370 static int
1371 aes_free_context(crypto_ctx_t *ctx)
1372 {
1373 
1374 /* EXPORT DELETE START */
1375 
1376 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1377 
1378 	if (aes_ctx != NULL) {
1379 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1380 			ASSERT(aes_ctx->ac_keysched_len != 0);
1381 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1382 			kmem_free(aes_ctx->ac_keysched,
1383 			    aes_ctx->ac_keysched_len);
1384 		}
1385 		crypto_free_mode_ctx(aes_ctx);
1386 		ctx->cc_provider_private = NULL;
1387 	}
1388 
1389 /* EXPORT DELETE END */
1390 
1391 	return (CRYPTO_SUCCESS);
1392 }
1393 
1394 
1395 static int
1396 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1397     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1398     boolean_t is_encrypt_init)
1399 {
1400 	int rv = CRYPTO_SUCCESS;
1401 
1402 /* EXPORT DELETE START */
1403 
1404 	void *keysched;
1405 	size_t size;
1406 
1407 	if (template == NULL) {
1408 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1409 			return (CRYPTO_HOST_MEMORY);
1410 		/*
1411 		 * Initialize key schedule.
1412 		 * Key length is stored in the key.
1413 		 */
1414 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1415 			kmem_free(keysched, size);
1416 			return (rv);
1417 		}
1418 
1419 		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1420 		aes_ctx->ac_keysched_len = size;
1421 	} else {
1422 		keysched = template;
1423 	}
1424 	aes_ctx->ac_keysched = keysched;
1425 
1426 	switch (mechanism->cm_type) {
1427 	case AES_CBC_MECH_INFO_TYPE:
1428 		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1429 		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1430 		break;
1431 	case AES_CTR_MECH_INFO_TYPE: {
1432 		CK_AES_CTR_PARAMS *pp;
1433 
1434 		if (mechanism->cm_param == NULL ||
1435 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1436 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1437 		}
1438 		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1439 		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1440 		    pp->cb, aes_copy_block);
1441 		break;
1442 	}
1443 	case AES_CCM_MECH_INFO_TYPE:
1444 		if (mechanism->cm_param == NULL ||
1445 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1446 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1447 		}
1448 		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1449 		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1450 		    aes_xor_block);
1451 		break;
1452 	case AES_GCM_MECH_INFO_TYPE:
1453 		if (mechanism->cm_param == NULL ||
1454 		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1455 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1456 		}
1457 		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1458 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1459 		    aes_xor_block);
1460 		break;
1461 	case AES_GMAC_MECH_INFO_TYPE:
1462 		if (mechanism->cm_param == NULL ||
1463 		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1464 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1465 		}
1466 		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1467 		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1468 		    aes_xor_block);
1469 		break;
1470 	case AES_ECB_MECH_INFO_TYPE:
1471 		aes_ctx->ac_flags |= ECB_MODE;
1472 	}
1473 
1474 	if (rv != CRYPTO_SUCCESS) {
1475 		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1476 			bzero(keysched, size);
1477 			kmem_free(keysched, size);
1478 		}
1479 	}
1480 
1481 /* EXPORT DELETE END */
1482 
1483 	return (rv);
1484 }
1485 
1486 static int
1487 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1488     CK_AES_GCM_PARAMS *gcm_params)
1489 {
1490 	/* LINTED: pointer alignment */
1491 	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1492 
1493 	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1494 		return (CRYPTO_MECHANISM_INVALID);
1495 
1496 	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1497 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1498 
1499 	if (params->pIv == NULL)
1500 		return (CRYPTO_MECHANISM_PARAM_INVALID);
1501 
1502 	gcm_params->pIv = params->pIv;
1503 	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1504 	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1505 
1506 	if (data == NULL)
1507 		return (CRYPTO_SUCCESS);
1508 
1509 	if (data->cd_format != CRYPTO_DATA_RAW)
1510 		return (CRYPTO_ARGUMENTS_BAD);
1511 
1512 	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1513 	gcm_params->ulAADLen = data->cd_length;
1514 	return (CRYPTO_SUCCESS);
1515 }
1516 
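/*
 * GMAC is implemented on top of GCM: process_gmac_mech() above turns the
 * GMAC parameters into CK_AES_GCM_PARAMS with the data to authenticate
 * supplied as AAD, and the MAC operation is then a GCM encryption of the
 * zero-length null_crypto_data, whose only output is the tag.
 */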
1517 static int
1518 aes_mac_atomic(crypto_provider_handle_t provider,
1519     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1520     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1521     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1522 {
1523 	CK_AES_GCM_PARAMS gcm_params;
1524 	crypto_mechanism_t gcm_mech;
1525 	int rv;
1526 
1527 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1528 	    != CRYPTO_SUCCESS)
1529 		return (rv);
1530 
1531 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1532 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1533 	gcm_mech.cm_param = (char *)&gcm_params;
1534 
1535 	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1536 	    key, &null_crypto_data, mac, template, req));
1537 }
1538 
1539 static int
1540 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1541     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1542     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1543     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1544 {
1545 	CK_AES_GCM_PARAMS gcm_params;
1546 	crypto_mechanism_t gcm_mech;
1547 	int rv;
1548 
1549 	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1550 	    != CRYPTO_SUCCESS)
1551 		return (rv);
1552 
1553 	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1554 	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1555 	gcm_mech.cm_param = (char *)&gcm_params;
1556 
1557 	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1558 	    key, mac, &null_crypto_data, template, req));
1559 }
1560