xref: /titanic_41/usr/src/uts/common/crypto/io/aes.c (revision 8e50dcc9f00b393d43e6aa42b820bcbf1d3e1ce4)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 /*
29  * AES provider for the Kernel Cryptographic Framework (KCF)
30  */
31 
32 #include <sys/types.h>
33 #include <sys/systm.h>
34 #include <sys/modctl.h>
35 #include <sys/cmn_err.h>
36 #include <sys/ddi.h>
37 #include <sys/crypto/common.h>
38 #include <sys/crypto/spi.h>
39 #include <sys/sysmacros.h>
40 #include <sys/strsun.h>
41 #include <aes_impl.h>
42 #include <aes_cbc_crypt.h>
43 
44 extern struct mod_ops mod_cryptoops;
45 
46 /*
47  * Module linkage information for the kernel.
48  */
49 static struct modlcrypto modlcrypto = {
50 	&mod_cryptoops,
51 	"AES Kernel SW Provider"
52 };
53 
54 static struct modlinkage modlinkage = {
55 	MODREV_1,
56 	(void *)&modlcrypto,
57 	NULL
58 };
59 
60 /*
61  * CSPI information (entry points, provider info, etc.)
62  */
63 typedef enum aes_mech_type {
64 	AES_ECB_MECH_INFO_TYPE,		/* SUN_CKM_AES_ECB */
65 	AES_CBC_MECH_INFO_TYPE,		/* SUN_CKM_AES_CBC */
66 	AES_CBC_PAD_MECH_INFO_TYPE,	/* SUN_CKM_AES_CBC_PAD */
67 	AES_CTR_MECH_INFO_TYPE,		/* SUN_CKM_AES_CTR */
68 	AES_CCM_MECH_INFO_TYPE		/* SUN_CKM_AES_CCM */
69 } aes_mech_type_t;
70 
71 /*
72  * The following definitions are to keep EXPORT_SRC happy.
73  */
74 #ifndef AES_MIN_KEY_BYTES
75 #define	AES_MIN_KEY_BYTES		0
76 #endif
77 
78 #ifndef AES_MAX_KEY_BYTES
79 #define	AES_MAX_KEY_BYTES		0
80 #endif
81 
82 /*
83  * Mechanism info structure passed to KCF during registration.
84  */
85 static crypto_mech_info_t aes_mech_info_tab[] = {
86 	/* AES_ECB */
87 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
88 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
89 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
90 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
91 	/* AES_CBC */
92 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
93 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
94 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
95 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
96 	/* AES_CTR */
97 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
98 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
99 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
100 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
101 	/* AES_CCM */
102 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
103 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
104 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
105 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
106 };
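
/*
 * Note that AES_CBC_PAD_MECH_INFO_TYPE appears in the aes_mech_type_t
 * enumeration above but has no entry in aes_mech_info_tab[]; this
 * provider does not register a CBC-with-padding mechanism.
 */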
107 
108 /* operations are in-place if the output buffer is NULL */
109 #define	AES_ARG_INPLACE(input, output)				\
110 	if ((output) == NULL)					\
111 		(output) = (input);
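
/*
 * Note that AES_ARG_INPLACE() expands to a bare "if" statement with no
 * enclosing braces, so it must not be followed directly by an "else"
 * clause at its call sites.
 */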
112 
113 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
114 
115 static crypto_control_ops_t aes_control_ops = {
116 	aes_provider_status
117 };
118 
119 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
120     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
121 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
122     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
123 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
124     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
125 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
126     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
127 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
128     crypto_req_handle_t);
129 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
130     crypto_req_handle_t);
131 
132 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
133     crypto_req_handle_t);
134 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
135     crypto_data_t *, crypto_req_handle_t);
136 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
137     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
138     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
139 
140 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
141     crypto_req_handle_t);
142 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
143     crypto_data_t *, crypto_req_handle_t);
144 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
145     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
146     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
147 
148 static crypto_cipher_ops_t aes_cipher_ops = {
149 	aes_encrypt_init,
150 	aes_encrypt,
151 	aes_encrypt_update,
152 	aes_encrypt_final,
153 	aes_encrypt_atomic,
154 	aes_decrypt_init,
155 	aes_decrypt,
156 	aes_decrypt_update,
157 	aes_decrypt_final,
158 	aes_decrypt_atomic
159 };
160 
161 static int aes_create_ctx_template(crypto_provider_handle_t,
162     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
163     size_t *, crypto_req_handle_t);
164 static int aes_free_context(crypto_ctx_t *);
165 
166 static crypto_ctx_ops_t aes_ctx_ops = {
167 	aes_create_ctx_template,
168 	aes_free_context
169 };
170 
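/*
 * Only the control, cipher, and context-template operation vectors are
 * supplied below; the remaining entries are left NULL because this
 * provider implements encryption and decryption only.
 */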
171 static crypto_ops_t aes_crypto_ops = {
172 	&aes_control_ops,
173 	NULL,
174 	&aes_cipher_ops,
175 	NULL,
176 	NULL,
177 	NULL,
178 	NULL,
179 	NULL,
180 	NULL,
181 	NULL,
182 	NULL,
183 	NULL,
184 	NULL,
185 	&aes_ctx_ops
186 };
187 
188 static crypto_provider_info_t aes_prov_info = {
189 	CRYPTO_SPI_VERSION_1,
190 	"AES Software Provider",
191 	CRYPTO_SW_PROVIDER,
192 	{&modlinkage},
193 	NULL,
194 	&aes_crypto_ops,
195 	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
196 	aes_mech_info_tab
197 };
198 
199 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
200 
201 int
202 _init(void)
203 {
204 	int ret;
205 
206 	/*
207 	 * Register with KCF. If the registration fails, return error.
208 	 */
209 	if ((ret = crypto_register_provider(&aes_prov_info,
210 	    &aes_prov_handle)) != CRYPTO_SUCCESS) {
211 		cmn_err(CE_WARN, "%s _init: crypto_register_provider() "
212 		    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
213 		return (EACCES);
214 	}
215 
216 	if ((ret = mod_install(&modlinkage)) != 0) {
217 		int rv;
218 
219 		ASSERT(aes_prov_handle != NULL);
220 		/* We should not return if the unregister returns busy. */
221 		while ((rv = crypto_unregister_provider(aes_prov_handle))
222 		    == CRYPTO_BUSY) {
223 			cmn_err(CE_WARN,
224 			    "%s _init: crypto_unregister_provider() "
225 			    "failed (0x%x). Retrying.",
226 			    CRYPTO_PROVIDER_NAME, rv);
227 			/* wait 10 seconds and try again. */
228 			delay(10 * drv_usectohz(1000000));
229 		}
230 	}
231 
232 	return (ret);
233 }
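
/*
 * Note on ordering (descriptive): the provider registers with KCF before
 * calling mod_install(), so if mod_install() fails the registration is
 * undone above, retrying while KCF reports the provider as busy, before
 * the mod_install() error is returned to the module loader.
 */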
234 
235 int
236 _fini(void)
237 {
238 	int ret;
239 
240 	/*
241 	 * Unregister from KCF if previous registration succeeded.
242 	 */
243 	if (aes_prov_handle != NULL) {
244 		if ((ret = crypto_unregister_provider(aes_prov_handle)) !=
245 		    CRYPTO_SUCCESS) {
246 			cmn_err(CE_WARN,
247 			    "%s _fini: crypto_unregister_provider() "
248 			    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
249 			return (EBUSY);
250 		}
251 		aes_prov_handle = NULL;
252 	}
253 
254 	return (mod_remove(&modlinkage));
255 }
256 
257 int
258 _info(struct modinfo *modinfop)
259 {
260 	return (mod_info(&modlinkage, modinfop));
261 }
262 
263 
264 static int
265 aes_check_mech_param(crypto_mechanism_t *mechanism)
266 {
267 	int rv = CRYPTO_SUCCESS;
268 
269 	switch (mechanism->cm_type) {
270 	case AES_ECB_MECH_INFO_TYPE:
271 		/* no parameter */
272 		break;
273 	case AES_CBC_MECH_INFO_TYPE:
274 		if (mechanism->cm_param != NULL &&
275 		    mechanism->cm_param_len != AES_BLOCK_LEN)
276 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
277 		break;
278 	case AES_CTR_MECH_INFO_TYPE:
279 		if (mechanism->cm_param != NULL &&
280 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS))
281 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
282 		break;
283 	case AES_CCM_MECH_INFO_TYPE:
284 		if (mechanism->cm_param != NULL &&
285 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS))
286 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
287 		break;
288 	default:
289 		rv = CRYPTO_MECHANISM_INVALID;
290 	}
291 	return (rv);
292 }
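
/*
 * Illustrative note (not part of the original code): a caller requesting
 * CTR mode passes a CK_AES_CTR_PARAMS in cm_param, with ulCounterBits set
 * to the width of the counter field (1 to 64, validated in
 * aes_common_init_ctx()) and cb[] holding the initial 16-byte counter
 * block; cm_param_len must equal sizeof (CK_AES_CTR_PARAMS), as checked
 * above.  CBC mode instead takes a raw 16-byte IV as its parameter.
 */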
293 
294 /* EXPORT DELETE START */
295 
296 /*
297  * Initialize key schedules for AES
298  */
299 static int
300 init_keysched(crypto_key_t *key, void *newbie)
301 {
302 	/*
303 	 * Only keys by value are supported by this module.
304 	 */
305 	switch (key->ck_format) {
306 	case CRYPTO_KEY_RAW:
307 		if (key->ck_length < AES_MINBITS ||
308 		    key->ck_length > AES_MAXBITS) {
309 			return (CRYPTO_KEY_SIZE_RANGE);
310 		}
311 
312 		/* key length (in bits) must be 128, 192, or 256 */
313 		if ((key->ck_length & 63) != 0)
314 			return (CRYPTO_KEY_SIZE_RANGE);
315 		break;
316 	default:
317 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
318 	}
319 
320 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
321 	return (CRYPTO_SUCCESS);
322 }
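
/*
 * Illustrative note (not part of the original code): for a raw 128-bit
 * key the caller sets ck_format to CRYPTO_KEY_RAW, ck_length to 128 (key
 * lengths are expressed in bits, as the range and multiple-of-64 checks
 * above assume), and ck_data to point at 16 bytes of key material.
 */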
323 
324 /* EXPORT DELETE END */
325 
326 /*
327  * KCF software provider control entry points.
328  */
329 /* ARGSUSED */
330 static void
331 aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
332 {
333 	*status = CRYPTO_PROVIDER_READY;
334 }
335 
336 static int
337 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
338     crypto_key_t *key, crypto_spi_ctx_template_t template,
339     crypto_req_handle_t req)
{
340 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
341 }
342 
343 static int
344 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
345     crypto_key_t *key, crypto_spi_ctx_template_t template,
346     crypto_req_handle_t req)
{
347 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
348 }
349 
350 
351 
352 /*
353  * KCF software provider encrypt entry points.
354  */
355 static int
356 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
357     crypto_key_t *key, crypto_spi_ctx_template_t template,
358     crypto_req_handle_t req, boolean_t is_encrypt_init)
359 {
360 
361 /* EXPORT DELETE START */
362 
363 	aes_ctx_t *aes_ctx;
364 	int rv;
365 	int kmflag;
366 
367 	/*
368 	 * Only keys by value are supported by this module.
369 	 */
370 	if (key->ck_format != CRYPTO_KEY_RAW) {
371 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
372 	}
373 
374 	if ((rv = aes_check_mech_param(mechanism)) != CRYPTO_SUCCESS)
375 		return (rv);
376 
377 	/*
378 	 * Allocate an AES context.
379 	 */
380 	kmflag = crypto_kmflag(req);
381 	if ((aes_ctx = kmem_zalloc(sizeof (aes_ctx_t), kmflag)) == NULL)
382 		return (CRYPTO_HOST_MEMORY);
383 
384 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
385 	    is_encrypt_init);
386 	if (rv != CRYPTO_SUCCESS) {
387 		kmem_free(aes_ctx, sizeof (aes_ctx_t));
388 		return (rv);
389 	}
390 
391 	ctx->cc_provider_private = aes_ctx;
392 
393 /* EXPORT DELETE END */
394 
395 	return (CRYPTO_SUCCESS);
396 }
397 
398 /*
399  * Helper AES cipher update function for iov input data.
400  */
401 static int
402 aes_cipher_update_iov(aes_ctx_t *aes_ctx, crypto_data_t *input,
403     crypto_data_t *output, int (*cipher)(aes_ctx_t *, caddr_t, size_t,
404     crypto_data_t *))
405 {
406 	int rv;
407 /* EXPORT DELETE START */
408 
409 	if (input->cd_miscdata != NULL) {
410 		if (IS_P2ALIGNED(input->cd_miscdata, sizeof (uint64_t))) {
411 			/* LINTED: pointer alignment */
412 			aes_ctx->ac_iv[0] = *(uint64_t *)input->cd_miscdata;
413 			/* LINTED: pointer alignment */
414 			aes_ctx->ac_iv[1] = *(uint64_t *)&input->cd_miscdata[8];
415 		} else {
416 			uint8_t *miscdata8 = (uint8_t *)&input->cd_miscdata[0];
417 			uint8_t *iv8 = (uint8_t *)&aes_ctx->ac_iv[0];
418 
419 			AES_COPY_BLOCK(miscdata8, iv8);
420 		}
421 	}
422 
423 	if (input->cd_offset + input->cd_length > input->cd_raw.iov_len)
424 		return (CRYPTO_ARGUMENTS_BAD);
425 
426 	rv = (cipher)(aes_ctx, input->cd_raw.iov_base + input->cd_offset,
427 	    input->cd_length, (input == output) ? NULL : output);
428 
429 /* EXPORT DELETE END */
430 
431 	return (rv);
432 }
433 
434 /*
435  * Helper AES cipher update function for uio input data.
436  */
437 static int
438 aes_cipher_update_uio(aes_ctx_t *aes_ctx, crypto_data_t *input,
439     crypto_data_t *output, int (*cipher)(aes_ctx_t *, caddr_t, size_t,
440     crypto_data_t *))
441 {
442 /* EXPORT DELETE START */
443 	uio_t *uiop = input->cd_uio;
444 	off_t offset = input->cd_offset;
445 	size_t length = input->cd_length;
446 	uint_t vec_idx;
447 	size_t cur_len;
448 
449 	if (input->cd_miscdata != NULL) {
450 		if (IS_P2ALIGNED(input->cd_miscdata, sizeof (uint64_t))) {
451 			/* LINTED: pointer alignment */
452 			aes_ctx->ac_iv[0] = *(uint64_t *)input->cd_miscdata;
453 			/* LINTED: pointer alignment */
454 			aes_ctx->ac_iv[1] = *(uint64_t *)&input->cd_miscdata[8];
455 		} else {
456 			uint8_t *miscdata8 = (uint8_t *)&input->cd_miscdata[0];
457 			uint8_t *iv8 = (uint8_t *)&aes_ctx->ac_iv[0];
458 
459 			AES_COPY_BLOCK(miscdata8, iv8);
460 		}
461 	}
462 
463 	if (input->cd_uio->uio_segflg != UIO_SYSSPACE) {
464 		return (CRYPTO_ARGUMENTS_BAD);
465 	}
466 
467 	/*
468 	 * Jump to the first iovec containing data to be
469 	 * processed.
470 	 */
471 	for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
472 	    offset >= uiop->uio_iov[vec_idx].iov_len;
473 	    offset -= uiop->uio_iov[vec_idx++].iov_len)
474 		;
475 	if (vec_idx == uiop->uio_iovcnt) {
476 		/*
477 		 * The caller specified an offset that is larger than the
478 		 * total size of the buffers it provided.
479 		 */
480 		return (CRYPTO_DATA_LEN_RANGE);
481 	}
482 
483 	/*
484 	 * Now process the iovecs.
485 	 */
486 	while (vec_idx < uiop->uio_iovcnt && length > 0) {
487 		cur_len = MIN(uiop->uio_iov[vec_idx].iov_len -
488 		    offset, length);
489 
490 		(cipher)(aes_ctx, uiop->uio_iov[vec_idx].iov_base + offset,
491 		    cur_len, (input == output) ? NULL : output);
492 
493 		length -= cur_len;
494 		vec_idx++;
495 		offset = 0;
496 	}
497 
498 	if (vec_idx == uiop->uio_iovcnt && length > 0) {
499 		/*
500 		 * The end of the specified iovecs was reached but the
501 		 * length requested could not be processed; the caller
502 		 * asked to process more data than it provided.
503 		 */
504 
505 		return (CRYPTO_DATA_LEN_RANGE);
506 	}
507 
508 /* EXPORT DELETE END */
509 
510 	return (CRYPTO_SUCCESS);
511 }
512 
513 /*
514  * Helper AES cipher update function for mblk input data.
515  */
516 static int
517 aes_cipher_update_mp(aes_ctx_t *aes_ctx, crypto_data_t *input,
518     crypto_data_t *output, int (*cipher)(aes_ctx_t *, caddr_t, size_t,
519     crypto_data_t *))
520 {
521 /* EXPORT DELETE START */
522 	off_t offset = input->cd_offset;
523 	size_t length = input->cd_length;
524 	mblk_t *mp;
525 	size_t cur_len;
526 
527 	if (input->cd_miscdata != NULL) {
528 		if (IS_P2ALIGNED(input->cd_miscdata, sizeof (uint64_t))) {
529 			/* LINTED: pointer alignment */
530 			aes_ctx->ac_iv[0] = *(uint64_t *)input->cd_miscdata;
531 			/* LINTED: pointer alignment */
532 			aes_ctx->ac_iv[1] = *(uint64_t *)&input->cd_miscdata[8];
533 		} else {
534 			uint8_t *miscdata8 = (uint8_t *)&input->cd_miscdata[0];
535 			uint8_t *iv8 = (uint8_t *)&aes_ctx->ac_iv[0];
536 
537 			AES_COPY_BLOCK(miscdata8, iv8);
538 		}
539 	}
540 
541 	/*
542 	 * Jump to the first mblk_t containing data to be processed.
543 	 */
544 	for (mp = input->cd_mp; mp != NULL && offset >= MBLKL(mp);
545 	    offset -= MBLKL(mp), mp = mp->b_cont)
546 		;
547 	if (mp == NULL) {
548 		/*
549 		 * The caller specified an offset that is larger than the
550 		 * total size of the buffers it provided.
551 		 */
552 		return (CRYPTO_DATA_LEN_RANGE);
553 	}
554 
555 	/*
556 	 * Now do the processing on the mblk chain.
557 	 */
558 	while (mp != NULL && length > 0) {
559 		cur_len = MIN(MBLKL(mp) - offset, length);
560 		(cipher)(aes_ctx, (char *)(mp->b_rptr + offset), cur_len,
561 		    (input == output) ? NULL : output);
562 
563 		length -= cur_len;
564 		offset = 0;
565 		mp = mp->b_cont;
566 	}
567 
568 	if (mp == NULL && length > 0) {
569 		/*
570 		 * The end of the mblk chain was reached but the length
571 		 * requested could not be processed; the caller asked to
572 		 * process more data than it provided.
573 		 */
574 		return (CRYPTO_DATA_LEN_RANGE);
575 	}
576 
577 /* EXPORT DELETE END */
578 
579 	return (CRYPTO_SUCCESS);
580 }
581 
582 /* ARGSUSED */
583 static int
584 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
585     crypto_data_t *ciphertext, crypto_req_handle_t req)
586 {
587 	int ret = CRYPTO_FAILED;
588 
589 /* EXPORT DELETE START */
590 
591 	aes_ctx_t *aes_ctx;
592 	size_t saved_length, saved_offset, length_needed;
593 
594 	ASSERT(ctx->cc_provider_private != NULL);
595 	aes_ctx = ctx->cc_provider_private;
596 
597 	/*
598 	 * For block ciphers, the plaintext must be a multiple of the AES
599 	 * block size.  This test is only valid for ciphers whose block size
600 	 * is a power of 2.  Even though AES CCM mode is built on a block
601 	 * cipher, it does not require the plaintext to be a multiple of the
602 	 * AES block size; the length requirement for AES CCM mode has
603 	 * already been checked at init time.
604 	 */
605 	if (((aes_ctx->ac_flags & AES_CTR_MODE) == 0) &&
606 	    ((aes_ctx->ac_flags & AES_CCM_MODE) == 0) &&
607 	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
608 		return (CRYPTO_DATA_LEN_RANGE);
609 
610 	AES_ARG_INPLACE(plaintext, ciphertext);
611 
612 	/*
613 	 * If the output buffer is too small, just return the length needed
614 	 * to store the output; do not destroy the context in that case.
615 	 */
616 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
617 		length_needed = plaintext->cd_length + aes_ctx->ac_ccm_mac_len;
618 	} else {
619 		length_needed = plaintext->cd_length;
620 	}
621 
622 	if (ciphertext->cd_length < length_needed) {
623 		ciphertext->cd_length = length_needed;
624 		return (CRYPTO_BUFFER_TOO_SMALL);
625 	}
626 
627 	saved_length = ciphertext->cd_length;
628 	saved_offset = ciphertext->cd_offset;
629 
630 	/*
631 	 * Do an update on the specified input data.
632 	 */
633 	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
634 	if (ret != CRYPTO_SUCCESS) {
635 		return (ret);
636 	}
637 
638 	/*
639 	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
640 	 * left-over unprocessed data and compute the MAC.
641 	 */
642 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
643 		/*
644 		 * aes_ccm_encrypt_final() will compute the MAC and append
645 		 * it to the existing ciphertext, so adjust the remaining
646 		 * length and offset accordingly.
647 		 */
648 
649 		/* the order of the following two lines must not be reversed */
650 		ciphertext->cd_offset = ciphertext->cd_length;
651 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
652 		ret = aes_ccm_encrypt_final(aes_ctx, ciphertext);
653 		if (ret != CRYPTO_SUCCESS) {
654 			return (ret);
655 		}
656 
657 		if (plaintext != ciphertext) {
658 			ciphertext->cd_length =
659 			    ciphertext->cd_offset - saved_offset;
660 		}
661 		ciphertext->cd_offset = saved_offset;
662 	}
663 
664 	ASSERT(aes_ctx->ac_remainder_len == 0);
665 	(void) aes_free_context(ctx);
666 
667 /* EXPORT DELETE END */
668 
669 	/* LINTED */
670 	return (ret);
671 }
672 
673 /* ARGSUSED */
674 static int
675 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
676     crypto_data_t *plaintext, crypto_req_handle_t req)
677 {
678 	int ret = CRYPTO_FAILED;
679 
680 /* EXPORT DELETE START */
681 
682 	aes_ctx_t *aes_ctx;
683 	off_t saved_offset;
684 	size_t saved_length;
685 
686 	ASSERT(ctx->cc_provider_private != NULL);
687 	aes_ctx = ctx->cc_provider_private;
688 
689 	/*
690 	 * For block ciphers, the ciphertext must be a multiple of the AES
691 	 * block size.  This test is only valid for ciphers whose block size
692 	 * is a power of 2.  Even though AES CCM mode is built on a block
693 	 * cipher, it does not require the ciphertext to be a multiple of the
694 	 * AES block size; the length requirement for AES CCM mode has
695 	 * already been checked at init time.
696 	 */
697 	if (((aes_ctx->ac_flags & AES_CTR_MODE) == 0) &&
698 	    ((aes_ctx->ac_flags & AES_CCM_MODE) == 0) &&
699 	    (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
700 		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
701 	}
702 
703 	AES_ARG_INPLACE(ciphertext, plaintext);
704 
705 	/*
706 	 * If the output buffer is too small, just return the length needed
707 	 * to store the output; do not destroy the context in that case.
708 	 *
709 	 * For AES CCM mode, the plaintext is the MAC length smaller than
710 	 * the ciphertext.
711 	 */
712 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
713 		if (plaintext->cd_length < aes_ctx->ac_ccm_data_len) {
714 			plaintext->cd_length = aes_ctx->ac_ccm_data_len;
715 			return (CRYPTO_BUFFER_TOO_SMALL);
716 		}
717 		saved_offset = plaintext->cd_offset;
718 		saved_length = plaintext->cd_length;
719 	} else if (plaintext->cd_length < ciphertext->cd_length) {
720 		plaintext->cd_length = ciphertext->cd_length;
721 		return (CRYPTO_BUFFER_TOO_SMALL);
722 	}
723 
724 	/*
725 	 * Do an update on the specified input data.
726 	 */
727 	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
728 	if (ret != CRYPTO_SUCCESS) {
729 		goto cleanup;
730 	}
731 
732 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
733 		ASSERT(aes_ctx->ac_ccm_processed_data_len ==
734 		    aes_ctx->ac_ccm_data_len);
735 		ASSERT(aes_ctx->ac_ccm_processed_mac_len ==
736 		    aes_ctx->ac_ccm_mac_len);
737 
738 		/* the order of the following two lines must not be reversed */
739 		plaintext->cd_offset = plaintext->cd_length;
740 		plaintext->cd_length = saved_length - plaintext->cd_length;
741 
742 		ret = aes_ccm_decrypt_final(aes_ctx, plaintext);
743 		if (ret == CRYPTO_SUCCESS) {
744 			if (plaintext != ciphertext) {
745 				plaintext->cd_length =
746 				    plaintext->cd_offset - saved_offset;
747 			}
748 		} else {
749 			plaintext->cd_length = saved_length;
750 		}
751 
752 		plaintext->cd_offset = saved_offset;
753 	}
754 
755 	ASSERT(aes_ctx->ac_remainder_len == 0);
756 
757 cleanup:
758 	if (aes_ctx->ac_ccm_pt_buf != NULL) {
759 		kmem_free(aes_ctx->ac_ccm_pt_buf, aes_ctx->ac_ccm_data_len);
760 	}
761 	(void) aes_free_context(ctx);
762 
763 /* EXPORT DELETE END */
764 
765 	/* LINTED */
766 	return (ret);
767 }
768 
769 /* ARGSUSED */
770 static int
771 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
772     crypto_data_t *ciphertext, crypto_req_handle_t req)
773 {
774 	off_t saved_offset;
775 	size_t saved_length, out_len;
776 	int ret = CRYPTO_SUCCESS;
777 	aes_ctx_t *aes_ctx;
778 
779 	ASSERT(ctx->cc_provider_private != NULL);
780 
781 	AES_ARG_INPLACE(plaintext, ciphertext);
782 
783 	/* compute the number of bytes of ciphertext this update will produce */
784 	out_len = ((aes_ctx_t *)ctx->cc_provider_private)->ac_remainder_len;
785 	out_len += plaintext->cd_length;
786 	out_len &= ~(AES_BLOCK_LEN - 1);
787 
788 	/* return length needed to store the output */
789 	if (ciphertext->cd_length < out_len) {
790 		ciphertext->cd_length = out_len;
791 		return (CRYPTO_BUFFER_TOO_SMALL);
792 	}
793 
794 	saved_offset = ciphertext->cd_offset;
795 	saved_length = ciphertext->cd_length;
796 
797 
798 	/*
799 	 * Do the AES update on the specified input data.
800 	 */
801 	switch (plaintext->cd_format) {
802 	case CRYPTO_DATA_RAW:
803 		ret = aes_cipher_update_iov(ctx->cc_provider_private,
804 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
805 		break;
806 	case CRYPTO_DATA_UIO:
807 		ret = aes_cipher_update_uio(ctx->cc_provider_private,
808 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
809 		break;
810 	case CRYPTO_DATA_MBLK:
811 		ret = aes_cipher_update_mp(ctx->cc_provider_private,
812 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
813 		break;
814 	default:
815 		ret = CRYPTO_ARGUMENTS_BAD;
816 	}
817 
818 	/*
819 	 * Since AES counter mode is a stream cipher, we call
820 	 * aes_counter_final() to pick up any remaining bytes.
821 	 * It is an internal function that does not destroy
822 	 * the context like *normal* final routines.
823 	 */
824 	aes_ctx = ctx->cc_provider_private;
825 	if ((aes_ctx->ac_flags & AES_CTR_MODE) &&
826 	    (aes_ctx->ac_remainder_len > 0)) {
827 		ret = aes_counter_final(aes_ctx, ciphertext);
828 	}
829 
830 	if (ret == CRYPTO_SUCCESS) {
831 		if (plaintext != ciphertext)
832 			ciphertext->cd_length =
833 			    ciphertext->cd_offset - saved_offset;
834 	} else {
835 		ciphertext->cd_length = saved_length;
836 	}
837 	ciphertext->cd_offset = saved_offset;
838 
839 	return (ret);
840 }
841 
842 /* ARGSUSED */
843 static int
844 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
845     crypto_data_t *plaintext, crypto_req_handle_t req)
846 {
847 	off_t saved_offset;
848 	size_t saved_length, out_len;
849 	int ret = CRYPTO_SUCCESS;
850 	aes_ctx_t *aes_ctx;
851 
852 	ASSERT(ctx->cc_provider_private != NULL);
853 
854 	AES_ARG_INPLACE(ciphertext, plaintext);
855 
856 	/* compute the number of bytes of plaintext this update will produce */
857 	out_len = ((aes_ctx_t *)ctx->cc_provider_private)->ac_remainder_len;
858 	out_len += ciphertext->cd_length;
859 	out_len &= ~(AES_BLOCK_LEN - 1);
860 
861 	/* return length needed to store the output */
862 	if (plaintext->cd_length < out_len) {
863 		plaintext->cd_length = out_len;
864 		return (CRYPTO_BUFFER_TOO_SMALL);
865 	}
866 
867 	saved_offset = plaintext->cd_offset;
868 	saved_length = plaintext->cd_length;
869 
870 	/*
871 	 * Do the AES update on the specified input data.
872 	 */
873 	switch (ciphertext->cd_format) {
874 	case CRYPTO_DATA_RAW:
875 		ret = aes_cipher_update_iov(ctx->cc_provider_private,
876 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
877 		break;
878 	case CRYPTO_DATA_UIO:
879 		ret = aes_cipher_update_uio(ctx->cc_provider_private,
880 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
881 		break;
882 	case CRYPTO_DATA_MBLK:
883 		ret = aes_cipher_update_mp(ctx->cc_provider_private,
884 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
885 		break;
886 	default:
887 		ret = CRYPTO_ARGUMENTS_BAD;
888 	}
889 
890 	/*
891 	 * Since AES counter mode is a stream cipher, we call
892 	 * aes_counter_final() to pick up any remaining bytes.
893 	 * It is an internal function that does not destroy
894 	 * the context like *normal* final routines.
895 	 */
896 	aes_ctx = ctx->cc_provider_private;
897 	if ((aes_ctx->ac_flags & AES_CTR_MODE) &&
898 	    (aes_ctx->ac_remainder_len > 0)) {
899 		ret = aes_counter_final(aes_ctx, plaintext);
900 	}
901 
902 	if (ret == CRYPTO_SUCCESS) {
903 		if (ciphertext != plaintext)
904 			plaintext->cd_length =
905 			    plaintext->cd_offset - saved_offset;
906 	} else {
907 		plaintext->cd_length = saved_length;
908 	}
909 	plaintext->cd_offset = saved_offset;
910 
911 
912 	return (ret);
913 }
914 
915 /* ARGSUSED */
916 static int
917 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
918     crypto_req_handle_t req)
919 {
920 
921 /* EXPORT DELETE START */
922 
923 	aes_ctx_t *aes_ctx;
924 	int ret;
925 
926 	ASSERT(ctx->cc_provider_private != NULL);
927 	aes_ctx = ctx->cc_provider_private;
928 
929 	if (data->cd_format != CRYPTO_DATA_RAW &&
930 	    data->cd_format != CRYPTO_DATA_UIO &&
931 	    data->cd_format != CRYPTO_DATA_MBLK) {
932 		return (CRYPTO_ARGUMENTS_BAD);
933 	}
934 
935 	if (aes_ctx->ac_flags & AES_CTR_MODE) {
936 		if (aes_ctx->ac_remainder_len > 0) {
937 			ret = aes_counter_final(aes_ctx, data);
938 			if (ret != CRYPTO_SUCCESS)
939 				return (ret);
940 		}
941 	} else if (aes_ctx->ac_flags & AES_CCM_MODE) {
942 		ret = aes_ccm_encrypt_final(aes_ctx, data);
943 		if (ret != CRYPTO_SUCCESS) {
944 			return (ret);
945 		}
946 	} else {
947 		/*
948 		 * There must be no unprocessed plaintext.
949 		 * Leftover plaintext remains when the total length of the
950 		 * data is not a multiple of the AES block length.
951 		 */
952 		if (aes_ctx->ac_remainder_len > 0) {
953 			return (CRYPTO_DATA_LEN_RANGE);
954 		}
955 		data->cd_length = 0;
956 	}
957 
958 	(void) aes_free_context(ctx);
959 
960 /* EXPORT DELETE END */
961 
962 	return (CRYPTO_SUCCESS);
963 }
964 
965 /* ARGSUSED */
966 static int
967 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
968     crypto_req_handle_t req)
969 {
970 
971 /* EXPORT DELETE START */
972 
973 	aes_ctx_t *aes_ctx;
974 	int ret;
975 	off_t saved_offset;
976 	size_t saved_length;
977 
978 	ASSERT(ctx->cc_provider_private != NULL);
979 	aes_ctx = ctx->cc_provider_private;
980 
981 	if (data->cd_format != CRYPTO_DATA_RAW &&
982 	    data->cd_format != CRYPTO_DATA_UIO &&
983 	    data->cd_format != CRYPTO_DATA_MBLK) {
984 		return (CRYPTO_ARGUMENTS_BAD);
985 	}
986 
987 	/*
988 	 * There must be no unprocessed ciphertext.
989 	 * Leftover ciphertext remains when the total length of the
990 	 * ciphertext is not a multiple of the AES block length.
991 	 */
992 	if (aes_ctx->ac_remainder_len > 0) {
993 		if ((aes_ctx->ac_flags & AES_CTR_MODE) == 0) {
994 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
995 		} else {
996 			ret = aes_counter_final(aes_ctx, data);
997 			if (ret != CRYPTO_SUCCESS)
998 				return (ret);
999 		}
1000 	}
1001 
1002 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
1003 		/*
1004 		 * This is where all the plaintext is returned; make sure
1005 		 * the plaintext buffer is big enough.
1006 		 */
1007 		size_t pt_len = aes_ctx->ac_ccm_data_len;
1008 		if (data->cd_length < pt_len) {
1009 			data->cd_length = pt_len;
1010 			return (CRYPTO_BUFFER_TOO_SMALL);
1011 		}
1012 
1013 		ASSERT(aes_ctx->ac_ccm_processed_data_len == pt_len);
1014 		ASSERT(aes_ctx->ac_ccm_processed_mac_len
1015 		    == aes_ctx->ac_ccm_mac_len);
1016 		saved_offset = data->cd_offset;
1017 		saved_length = data->cd_length;
1018 		ret = aes_ccm_decrypt_final(aes_ctx, data);
1019 		if (ret == CRYPTO_SUCCESS) {
1020 			data->cd_length = data->cd_offset - saved_offset;
1021 		} else {
1022 			data->cd_length = saved_length;
1023 		}
1024 
1025 		data->cd_offset = saved_offset;
1026 		if (ret != CRYPTO_SUCCESS) {
1027 			return (ret);
1028 		}
1029 	}
1030 
1031 
1032 	if (((aes_ctx->ac_flags & AES_CTR_MODE) == 0) &&
1033 	    ((aes_ctx->ac_flags & AES_CCM_MODE) == 0)) {
1034 		data->cd_length = 0;
1035 	}
1036 
1037 	if (aes_ctx->ac_ccm_pt_buf != NULL) {
1038 		kmem_free(aes_ctx->ac_ccm_pt_buf, aes_ctx->ac_ccm_data_len);
1039 	}
1040 
1041 	(void) aes_free_context(ctx);
1042 
1043 /* EXPORT DELETE END */
1044 
1045 	return (CRYPTO_SUCCESS);
1046 }
1047 
1048 /* ARGSUSED */
1049 static int
1050 aes_encrypt_atomic(crypto_provider_handle_t provider,
1051     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1052     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1053     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1054 {
1055 	aes_ctx_t aes_ctx;	/* on the stack */
1056 	off_t saved_offset;
1057 	size_t saved_length;
1058 	int ret;
1059 
1060 	AES_ARG_INPLACE(plaintext, ciphertext);
1061 
1062 	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
1063 	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
1064 		/*
1065 		 * Plaintext must be a multiple of AES block size.
1066 		 * This test only works for non-padded mechanisms
1067 		 * when blocksize is 2^N.
1068 		 */
1069 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1070 			return (CRYPTO_DATA_LEN_RANGE);
1071 	}
1072 
1073 	/* return length needed to store the output */
1074 	if (ciphertext->cd_length < plaintext->cd_length) {
1075 		ciphertext->cd_length = plaintext->cd_length;
1076 		return (CRYPTO_BUFFER_TOO_SMALL);
1077 	}
1078 
1079 	if ((ret = aes_check_mech_param(mechanism)) != CRYPTO_SUCCESS)
1080 		return (ret);
1081 
1082 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1083 
1084 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1085 	    crypto_kmflag(req), B_TRUE);
1086 	if (ret != CRYPTO_SUCCESS)
1087 		return (ret);
1088 
1089 	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1090 		size_t length_needed =
1091 		    plaintext->cd_length + aes_ctx.ac_ccm_mac_len;
1092 		if (ciphertext->cd_length < length_needed) {
1093 			ciphertext->cd_length = length_needed;
1094 			ret = CRYPTO_BUFFER_TOO_SMALL;
			/* release the key schedule via the out: label */
			goto out;
1095 		}
1096 	}
1097 
1098 
1099 	saved_offset = ciphertext->cd_offset;
1100 	saved_length = ciphertext->cd_length;
1101 
1102 	/*
1103 	 * Do an update on the specified input data.
1104 	 */
1105 	switch (plaintext->cd_format) {
1106 	case CRYPTO_DATA_RAW:
1107 		ret = aes_cipher_update_iov(&aes_ctx, plaintext, ciphertext,
1108 		    aes_encrypt_contiguous_blocks);
1109 		break;
1110 	case CRYPTO_DATA_UIO:
1111 		ret = aes_cipher_update_uio(&aes_ctx, plaintext, ciphertext,
1112 		    aes_encrypt_contiguous_blocks);
1113 		break;
1114 	case CRYPTO_DATA_MBLK:
1115 		ret = aes_cipher_update_mp(&aes_ctx, plaintext, ciphertext,
1116 		    aes_encrypt_contiguous_blocks);
1117 		break;
1118 	default:
1119 		ret = CRYPTO_ARGUMENTS_BAD;
1120 	}
1121 
1122 	if (ret == CRYPTO_SUCCESS) {
1123 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1124 			ret = aes_ccm_encrypt_final(&aes_ctx, ciphertext);
1125 			if (ret != CRYPTO_SUCCESS)
1126 				goto out;
1127 			ASSERT(aes_ctx.ac_remainder_len == 0);
1128 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1129 			if (aes_ctx.ac_remainder_len > 0) {
1130 				ret = aes_counter_final(&aes_ctx, ciphertext);
1131 				if (ret != CRYPTO_SUCCESS)
1132 					goto out;
1133 			}
1134 		} else {
1135 			ASSERT(aes_ctx.ac_remainder_len == 0);
1136 		}
1137 
1138 		if (plaintext != ciphertext) {
1139 			ciphertext->cd_length =
1140 			    ciphertext->cd_offset - saved_offset;
1141 		}
1142 	} else {
1143 		ciphertext->cd_length = saved_length;
1144 	}
1145 	ciphertext->cd_offset = saved_offset;
1146 
1147 out:
1148 	if (aes_ctx.ac_flags & AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1149 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1150 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1151 	}
1152 
1153 	return (ret);
1154 }
1155 
1156 /* ARGSUSED */
1157 static int
1158 aes_decrypt_atomic(crypto_provider_handle_t provider,
1159     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1160     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1161     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1162 {
1163 	aes_ctx_t aes_ctx;	/* on the stack */
1164 	off_t saved_offset;
1165 	size_t saved_length;
1166 	int ret;
1167 
1168 	AES_ARG_INPLACE(ciphertext, plaintext);
1169 
1170 	/*
1171 	 * For block ciphers, the ciphertext must be a multiple of the AES
1172 	 * block size.  This test is only valid for non-padded mechanisms
1173 	 * whose block size is a power of 2.  Even though AES CCM mode is
1174 	 * built on a block cipher, it does not require the ciphertext to be
1175 	 * a multiple of the AES block size.
1176 	 * The length requirement for AES CCM mode is checked below, after
1177 	 * the context has been initialized.
1178 	 */
1179 	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
1180 	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE) &&
1181 	    ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0))
1182 		return (CRYPTO_DATA_LEN_RANGE);
1183 
1184 	/*
1185 	 * return the length needed to store the output; the length
1186 	 * requirement for AES CCM mode cannot be determined until later
1187 	 */
1188 	if ((plaintext->cd_length < ciphertext->cd_length) &&
1189 	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
1190 		plaintext->cd_length = ciphertext->cd_length;
1191 		return (CRYPTO_BUFFER_TOO_SMALL);
1192 	}
1193 
1194 
1195 	if ((ret = aes_check_mech_param(mechanism)) != CRYPTO_SUCCESS)
1196 		return (ret);
1197 
1198 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1199 
1200 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1201 	    crypto_kmflag(req), B_FALSE);
1202 	if (ret != CRYPTO_SUCCESS)
1203 		return (ret);
1204 
1205 	/* check length requirement for AES CCM mode now */
1206 	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1207 		if (plaintext->cd_length < aes_ctx.ac_ccm_data_len) {
1208 			plaintext->cd_length = aes_ctx.ac_ccm_data_len;
1209 			ret = CRYPTO_BUFFER_TOO_SMALL;
1210 			goto out;
1211 		}
1212 	}
1213 
1214 	saved_offset = plaintext->cd_offset;
1215 	saved_length = plaintext->cd_length;
1216 
1217 	/*
1218 	 * Do an update on the specified input data.
1219 	 */
1220 	switch (ciphertext->cd_format) {
1221 	case CRYPTO_DATA_RAW:
1222 		ret = aes_cipher_update_iov(&aes_ctx, ciphertext, plaintext,
1223 		    aes_decrypt_contiguous_blocks);
1224 		break;
1225 	case CRYPTO_DATA_UIO:
1226 		ret = aes_cipher_update_uio(&aes_ctx, ciphertext, plaintext,
1227 		    aes_decrypt_contiguous_blocks);
1228 		break;
1229 	case CRYPTO_DATA_MBLK:
1230 		ret = aes_cipher_update_mp(&aes_ctx, ciphertext, plaintext,
1231 		    aes_decrypt_contiguous_blocks);
1232 		break;
1233 	default:
1234 		ret = CRYPTO_ARGUMENTS_BAD;
1235 	}
1236 
1237 	if (ret == CRYPTO_SUCCESS) {
1238 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1239 			ASSERT(aes_ctx.ac_ccm_processed_data_len ==
1240 			    aes_ctx.ac_ccm_data_len);
1241 			ASSERT(aes_ctx.ac_ccm_processed_mac_len ==
1242 			    aes_ctx.ac_ccm_mac_len);
1243 			ret = aes_ccm_decrypt_final(&aes_ctx, plaintext);
1244 			ASSERT(aes_ctx.ac_remainder_len == 0);
1245 			if ((ret == CRYPTO_SUCCESS) &&
1246 			    (ciphertext != plaintext)) {
1247 				plaintext->cd_length =
1248 				    plaintext->cd_offset - saved_offset;
1249 			} else {
1250 				plaintext->cd_length = saved_length;
1251 			}
1252 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1253 			ASSERT(aes_ctx.ac_remainder_len == 0);
1254 			if (ciphertext != plaintext)
1255 				plaintext->cd_length =
1256 				    plaintext->cd_offset - saved_offset;
1257 		} else {
1258 			if (aes_ctx.ac_remainder_len > 0) {
1259 				ret = aes_counter_final(&aes_ctx, plaintext);
1260 				if (ret != CRYPTO_SUCCESS)
1261 					goto out;
1262 			}
1263 			if (ciphertext != plaintext)
1264 				plaintext->cd_length =
1265 				    plaintext->cd_offset - saved_offset;
1266 		}
1267 	} else {
1268 		plaintext->cd_length = saved_length;
1269 	}
1270 	plaintext->cd_offset = saved_offset;
1271 
1272 out:
1273 	if (aes_ctx.ac_flags & AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1274 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1275 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1276 	}
1277 
1278 	if (aes_ctx.ac_ccm_pt_buf != NULL) {
1279 		kmem_free(aes_ctx.ac_ccm_pt_buf, aes_ctx.ac_ccm_data_len);
1280 	}
1281 
1282 	return (ret);
1283 }
1284 
1285 /*
1286  * KCF software provider context template entry points.
1287  */
1288 /* ARGSUSED */
1289 static int
1290 aes_create_ctx_template(crypto_provider_handle_t provider,
1291     crypto_mechanism_t *mechanism, crypto_key_t *key,
1292     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1293 {
1294 
1295 /* EXPORT DELETE START */
1296 
1297 	void *keysched;
1298 	size_t size;
1299 	int rv;
1300 
1301 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1302 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1303 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1304 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)
1305 		return (CRYPTO_MECHANISM_INVALID);
1306 
1307 	if ((keysched = aes_alloc_keysched(&size,
1308 	    crypto_kmflag(req))) == NULL) {
1309 		return (CRYPTO_HOST_MEMORY);
1310 	}
1311 
1312 	/*
1313 	 * Initialize key schedule.  Key length information is stored
1314 	 * in the key.
1315 	 */
1316 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1317 		bzero(keysched, size);
1318 		kmem_free(keysched, size);
1319 		return (rv);
1320 	}
1321 
1322 	*tmpl = keysched;
1323 	*tmpl_size = size;
1324 
1325 /* EXPORT DELETE END */
1326 
1327 	return (CRYPTO_SUCCESS);
1328 }
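
/*
 * Note (descriptive): the context template returned above is simply the
 * expanded key schedule.  When a template is supplied to
 * aes_common_init_ctx(), it is used directly and the
 * AES_PROVIDER_OWNS_KEY_SCHEDULE flag is not set, so the per-request
 * context never frees a caller-owned template.
 */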
1329 
1330 /* ARGSUSED */
1331 static int
1332 aes_free_context(crypto_ctx_t *ctx)
1333 {
1334 
1335 /* EXPORT DELETE START */
1336 
1337 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1338 
1339 	if (aes_ctx != NULL) {
1340 		if (aes_ctx->ac_flags & AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1341 			ASSERT(aes_ctx->ac_keysched_len != 0);
1342 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1343 			kmem_free(aes_ctx->ac_keysched,
1344 			    aes_ctx->ac_keysched_len);
1345 		}
1346 		kmem_free(aes_ctx, sizeof (aes_ctx_t));
1347 		ctx->cc_provider_private = NULL;
1348 	}
1349 
1350 /* EXPORT DELETE END */
1351 
1352 	return (CRYPTO_SUCCESS);
1353 }
1354 
1355 /* ARGSUSED */
1356 static int
1357 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1358     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1359     boolean_t is_encrypt_init)
1360 {
1361 	int rv = CRYPTO_SUCCESS;
1362 
1363 /* EXPORT DELETE START */
1364 
1365 	void *keysched;
1366 	size_t size;
1367 	CK_AES_CCM_PARAMS *ccm_param = NULL;
1368 
1369 	aes_ctx->ac_flags = 0;
1370 
1371 	if (mechanism->cm_type == AES_CBC_MECH_INFO_TYPE) {
1372 		/*
1373 		 * Copy 128-bit IV into context.
1374 		 *
1375 		 * If cm_param == NULL then the IV comes from the
1376 		 * cd_miscdata field in the crypto_data structure.
1377 		 */
1378 		if (mechanism->cm_param != NULL) {
1379 			ASSERT(mechanism->cm_param_len == AES_BLOCK_LEN);
1380 			if (IS_P2ALIGNED(mechanism->cm_param,
1381 			    sizeof (uint64_t))) {
1382 				uint64_t *param64;
1383 				param64 = (uint64_t *)mechanism->cm_param;
1384 
1385 				aes_ctx->ac_iv[0] = *param64++;
1386 				aes_ctx->ac_iv[1] = *param64;
1387 			} else {
1388 				uint8_t *iv8;
1389 				uint8_t *p8;
1390 				iv8 = (uint8_t *)&aes_ctx->ac_iv;
1391 				p8 = (uint8_t *)&mechanism->cm_param[0];
1392 
1393 				iv8[0] = p8[0];
1394 				iv8[1] = p8[1];
1395 				iv8[2] = p8[2];
1396 				iv8[3] = p8[3];
1397 				iv8[4] = p8[4];
1398 				iv8[5] = p8[5];
1399 				iv8[6] = p8[6];
1400 				iv8[7] = p8[7];
1401 				iv8[8] = p8[8];
1402 				iv8[9] = p8[9];
1403 				iv8[10] = p8[10];
1404 				iv8[11] = p8[11];
1405 				iv8[12] = p8[12];
1406 				iv8[13] = p8[13];
1407 				iv8[14] = p8[14];
1408 				iv8[15] = p8[15];
1409 			}
1410 		}
1411 
1412 		aes_ctx->ac_lastp = (uint8_t *)&aes_ctx->ac_iv[0];
1413 		aes_ctx->ac_flags |= AES_CBC_MODE;
1414 
1415 	} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1416 		if (mechanism->cm_param != NULL) {
1417 			CK_AES_CTR_PARAMS *pp;
1418 			uint64_t mask = 0;
1419 			ulong_t count;
1420 			uint8_t *iv8;
1421 			uint8_t *p8;
1422 
1423 			/* XXX what to do about miscdata */
1424 			pp = (CK_AES_CTR_PARAMS *)mechanism->cm_param;
1425 			count = pp->ulCounterBits;
1426 			if (count == 0 || count > 64) {
1427 				return (CRYPTO_MECHANISM_PARAM_INVALID);
1428 			}
1429 			while (count-- > 0)
1430 				mask |= (1ULL << count);
1431 #ifdef _LITTLE_ENDIAN
1432 			p8 = (uint8_t *)&mask;
1433 			mask = (((uint64_t)p8[0] << 56) |
1434 			    ((uint64_t)p8[1] << 48) |
1435 			    ((uint64_t)p8[2] << 40) |
1436 			    ((uint64_t)p8[3] << 32) |
1437 			    ((uint64_t)p8[4] << 24) |
1438 			    ((uint64_t)p8[5] << 16) |
1439 			    ((uint64_t)p8[6] << 8) |
1440 			    (uint64_t)p8[7]);
1441 #endif
1442 			aes_ctx->ac_counter_mask = mask;
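
			/*
			 * Illustrative example: ulCounterBits == 32 builds
			 * mask 0x00000000FFFFFFFF above; on a little-endian
			 * host the byte swap below turns it into
			 * 0xFFFFFFFF00000000, so that the mask covers the
			 * same bytes as the low-order (rightmost) portion of
			 * the big-endian counter block stored in ac_iv[].
			 */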
1443 
1444 			iv8 = (uint8_t *)&aes_ctx->ac_iv;
1445 			p8 = (uint8_t *)&pp->cb[0];
1446 
1447 			iv8[0] = p8[0];
1448 			iv8[1] = p8[1];
1449 			iv8[2] = p8[2];
1450 			iv8[3] = p8[3];
1451 			iv8[4] = p8[4];
1452 			iv8[5] = p8[5];
1453 			iv8[6] = p8[6];
1454 			iv8[7] = p8[7];
1455 			iv8[8] = p8[8];
1456 			iv8[9] = p8[9];
1457 			iv8[10] = p8[10];
1458 			iv8[11] = p8[11];
1459 			iv8[12] = p8[12];
1460 			iv8[13] = p8[13];
1461 			iv8[14] = p8[14];
1462 			iv8[15] = p8[15];
1463 		} else {
1464 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1465 		}
1466 
1467 		aes_ctx->ac_lastp = (uint8_t *)&aes_ctx->ac_iv[0];
1468 		aes_ctx->ac_flags |= AES_CTR_MODE;
1469 	} else if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1470 		if (mechanism->cm_param != NULL) {
1471 			int rc;
1472 
1473 			ccm_param = (CK_AES_CCM_PARAMS *)mechanism->cm_param;
1474 
1475 			if ((rc = aes_ccm_validate_args(ccm_param,
1476 			    is_encrypt_init)) != 0) {
1477 				return (rc);
1478 			}
1479 
1480 			aes_ctx->ac_ccm_mac_len = ccm_param->ulMACSize;
1481 			if (is_encrypt_init) {
1482 				aes_ctx->ac_ccm_data_len =
1483 				    ccm_param->ulDataSize;
1484 			} else {
1485 				aes_ctx->ac_ccm_data_len =
1486 				    ccm_param->ulDataSize -
1487 				    aes_ctx->ac_ccm_mac_len;
1488 				aes_ctx->ac_ccm_processed_mac_len = 0;
1489 			}
1490 			aes_ctx->ac_ccm_processed_data_len = 0;
1491 
1492 			aes_ctx->ac_flags |= AES_CCM_MODE;
1493 		} else {
1494 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1495 		}
1496 	} else {
1497 		aes_ctx->ac_flags |= AES_ECB_MODE;
1498 	}
1499 
1500 	if (template == NULL) {
1501 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1502 			return (CRYPTO_HOST_MEMORY);
1503 		/*
1504 		 * Initialize key schedule.
1505 		 * Key length is stored in the key.
1506 		 */
1507 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1508 			kmem_free(keysched, size);
1509 			return (rv);
1510 		}
1511 
1512 		aes_ctx->ac_flags |= AES_PROVIDER_OWNS_KEY_SCHEDULE;
1513 		aes_ctx->ac_keysched_len = size;
1514 	} else {
1515 		keysched = template;
1516 	}
1517 	aes_ctx->ac_keysched = keysched;
1518 
1519 	/* process the nonce and associated data if it is AES CCM mode */
1520 	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1521 		if (aes_ccm_init(aes_ctx, ccm_param->nonce,
1522 		    ccm_param->ulNonceSize, ccm_param->authData,
1523 		    ccm_param->ulAuthDataSize) != 0) {
			/*
			 * Only free a key schedule that this provider
			 * allocated above; a caller-supplied template must
			 * not be freed here.
			 */
			if (aes_ctx->ac_flags &
			    AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1524 				bzero(keysched, size);
1525 				kmem_free(keysched, size);
			}
1526 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1527 		}
1528 		if (!is_encrypt_init) {
1529 			/* allocate buffer for storing decrypted plaintext */
1530 			aes_ctx->ac_ccm_pt_buf =
1531 			    kmem_alloc(aes_ctx->ac_ccm_data_len, kmflag);
1532 			if (aes_ctx->ac_ccm_pt_buf == NULL) {
				if (aes_ctx->ac_flags &
				    AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1533 					bzero(keysched, size);
1534 					kmem_free(keysched, size);
				}
1535 				return (CRYPTO_HOST_MEMORY);
1536 			}
1537 		}
1538 	}
1539 
1540 /* EXPORT DELETE END */
1541 
1542 	return (rv);
1543 }
1544