xref: /titanic_50/usr/src/uts/common/crypto/io/aes.c (revision c5c4113dfcabb1eed3d4bdf7609de5170027a794)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 /*
22  * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
23  * Use is subject to license terms.
24  */
25 
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 /*
29  * AES provider for the Kernel Cryptographic Framework (KCF)
30  */
31 
32 #include <sys/types.h>
33 #include <sys/systm.h>
34 #include <sys/modctl.h>
35 #include <sys/cmn_err.h>
36 #include <sys/ddi.h>
37 #include <sys/crypto/common.h>
38 #include <sys/crypto/spi.h>
39 #include <sys/sysmacros.h>
40 #include <sys/strsun.h>
41 #include <aes_impl.h>
42 #include <aes_cbc_crypt.h>
43 
44 extern struct mod_ops mod_cryptoops;
45 
46 /*
47  * Module linkage information for the kernel.
48  */
49 static struct modlcrypto modlcrypto = {
50 	&mod_cryptoops,
51 	"AES Kernel SW Provider %I%"
52 };
53 
54 static struct modlinkage modlinkage = {
55 	MODREV_1,
56 	(void *)&modlcrypto,
57 	NULL
58 };
59 
60 /*
61  * CSPI information (entry points, provider info, etc.)
62  */
63 typedef enum aes_mech_type {
64 	AES_ECB_MECH_INFO_TYPE,		/* SUN_CKM_AES_ECB */
65 	AES_CBC_MECH_INFO_TYPE,		/* SUN_CKM_AES_CBC */
66 	AES_CBC_PAD_MECH_INFO_TYPE,	/* SUN_CKM_AES_CBC_PAD */
67 	AES_CTR_MECH_INFO_TYPE,		/* SUN_CKM_AES_CTR */
68 	AES_CCM_MECH_INFO_TYPE		/* SUN_CKM_AES_CCM */
69 } aes_mech_type_t;
70 
71 /*
72  * The following definitions are to keep EXPORT_SRC happy.
73  */
74 #ifndef AES_MIN_KEY_BYTES
75 #define	AES_MIN_KEY_BYTES		0
76 #endif
77 
78 #ifndef AES_MAX_KEY_BYTES
79 #define	AES_MAX_KEY_BYTES		0
80 #endif
81 
82 /*
83  * Mechanism info structure passed to KCF during registration.
84  */
85 static crypto_mech_info_t aes_mech_info_tab[] = {
86 	/* AES_ECB */
87 	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
88 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
89 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
90 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
91 	/* AES_CBC */
92 	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
93 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
94 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
95 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
96 	/* AES_CTR */
97 	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
98 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
99 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
100 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
101 	/* AES_CCM */
102 	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
103 	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
104 	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
105 	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
106 };
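
/*
 * Illustrative sketch (not compiled, variable names hypothetical): roughly
 * how a KCF consumer would describe a raw 256-bit AES key and a contiguous
 * data buffer for the mechanisms above.  The fields shown (ck_format,
 * ck_data, ck_length in bits, cd_format, cd_raw, cd_offset, cd_length) are
 * the ones this provider reads below.
 *
 *	static uint8_t keybytes[32], buf[64];
 *	crypto_key_t key;
 *	crypto_data_t pt;
 *
 *	key.ck_format = CRYPTO_KEY_RAW;
 *	key.ck_data = keybytes;
 *	key.ck_length = 256;
 *
 *	pt.cd_format = CRYPTO_DATA_RAW;
 *	pt.cd_offset = 0;
 *	pt.cd_length = sizeof (buf);
 *	pt.cd_raw.iov_base = (caddr_t)buf;
 *	pt.cd_raw.iov_len = sizeof (buf);
 */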
107 
108 /* operations are in-place if the output buffer is NULL */
109 #define	AES_ARG_INPLACE(input, output)				\
110 	if ((output) == NULL)					\
111 		(output) = (input);
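
/*
 * For example (illustrative only): a caller that passes a NULL ciphertext
 * argument to aes_encrypt() below gets the plaintext buffer overwritten in
 * place with the ciphertext.
 */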
112 
113 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
114 
115 static crypto_control_ops_t aes_control_ops = {
116 	aes_provider_status
117 };
118 
119 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
120     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
121 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
122     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
123 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
124     crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
125 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
126     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
127 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
128     crypto_req_handle_t);
129 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
130     crypto_req_handle_t);
131 
132 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
133     crypto_req_handle_t);
134 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
135     crypto_data_t *, crypto_req_handle_t);
136 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
137     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
138     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
139 
140 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
141     crypto_req_handle_t);
142 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
143     crypto_data_t *, crypto_req_handle_t);
144 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
145     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
146     crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
147 
148 static crypto_cipher_ops_t aes_cipher_ops = {
149 	aes_encrypt_init,
150 	aes_encrypt,
151 	aes_encrypt_update,
152 	aes_encrypt_final,
153 	aes_encrypt_atomic,
154 	aes_decrypt_init,
155 	aes_decrypt,
156 	aes_decrypt_update,
157 	aes_decrypt_final,
158 	aes_decrypt_atomic
159 };
160 
161 static int aes_create_ctx_template(crypto_provider_handle_t,
162     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
163     size_t *, crypto_req_handle_t);
164 static int aes_free_context(crypto_ctx_t *);
165 
166 static crypto_ctx_ops_t aes_ctx_ops = {
167 	aes_create_ctx_template,
168 	aes_free_context
169 };
170 
171 static crypto_ops_t aes_crypto_ops = {
172 	&aes_control_ops,
173 	NULL,
174 	&aes_cipher_ops,
175 	NULL,
176 	NULL,
177 	NULL,
178 	NULL,
179 	NULL,
180 	NULL,
181 	NULL,
182 	NULL,
183 	NULL,
184 	NULL,
185 	&aes_ctx_ops
186 };
187 
188 static crypto_provider_info_t aes_prov_info = {
189 	CRYPTO_SPI_VERSION_1,
190 	"AES Software Provider",
191 	CRYPTO_SW_PROVIDER,
192 	{&modlinkage},
193 	NULL,
194 	&aes_crypto_ops,
195 	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
196 	aes_mech_info_tab
197 };
198 
199 static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
200 
201 int
202 _init(void)
203 {
204 	int ret;
205 
206 	/*
207 	 * Register with KCF. If the registration fails, return error.
208 	 */
209 	if ((ret = crypto_register_provider(&aes_prov_info,
210 	    &aes_prov_handle)) != CRYPTO_SUCCESS) {
211 		cmn_err(CE_WARN, "%s _init: crypto_register_provider() "
212 		    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
213 		return (EACCES);
214 	}
215 
216 	if ((ret = mod_install(&modlinkage)) != 0) {
217 		int rv;
218 
219 		ASSERT(aes_prov_handle != NULL);
220 		/* We should not return if the unregister returns busy. */
221 		while ((rv = crypto_unregister_provider(aes_prov_handle))
222 		    == CRYPTO_BUSY) {
223 			cmn_err(CE_WARN,
224 			    "%s _init: crypto_unregister_provider() "
225 			    "failed (0x%x). Retrying.",
226 			    CRYPTO_PROVIDER_NAME, rv);
227 			/* wait 10 seconds and try again. */
228 			delay(10 * drv_usectohz(1000000));
229 		}
230 	}
231 
232 	return (ret);
233 }
234 
235 int
236 _fini(void)
237 {
238 	int ret;
239 
240 	/*
241 	 * Unregister from KCF if previous registration succeeded.
242 	 */
243 	if (aes_prov_handle != NULL) {
244 		if ((ret = crypto_unregister_provider(aes_prov_handle)) !=
245 		    CRYPTO_SUCCESS) {
246 			cmn_err(CE_WARN,
247 			    "%s _fini: crypto_unregister_provider() "
248 			    "failed (0x%x)", CRYPTO_PROVIDER_NAME, ret);
249 			return (EBUSY);
250 		}
251 		aes_prov_handle = NULL;
252 	}
253 
254 	return (mod_remove(&modlinkage));
255 }
256 
257 int
258 _info(struct modinfo *modinfop)
259 {
260 	return (mod_info(&modlinkage, modinfop));
261 }
262 
263 
264 static int
265 aes_check_mech_param(crypto_mechanism_t *mechanism)
266 {
267 	int rv = CRYPTO_SUCCESS;
268 
269 	switch (mechanism->cm_type) {
270 	case AES_ECB_MECH_INFO_TYPE:
271 		/* no parameter */
272 		break;
273 	case AES_CBC_MECH_INFO_TYPE:
274 		if (mechanism->cm_param != NULL &&
275 		    mechanism->cm_param_len != AES_BLOCK_LEN)
276 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
277 		break;
278 	case AES_CTR_MECH_INFO_TYPE:
279 		if (mechanism->cm_param != NULL &&
280 		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS))
281 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
282 		break;
283 	case AES_CCM_MECH_INFO_TYPE:
284 		if (mechanism->cm_param != NULL &&
285 		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS))
286 			rv = CRYPTO_MECHANISM_PARAM_INVALID;
287 		break;
288 	default:
289 		rv = CRYPTO_MECHANISM_INVALID;
290 	}
291 	return (rv);
292 }
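
/*
 * Illustrative sketch (hypothetical consumer code, not compiled) of the
 * per-mechanism parameters the checks above expect.  For CBC, cm_param is
 * simply the 16-byte IV:
 *
 *	crypto_mechanism_t mech;
 *	uint8_t iv[AES_BLOCK_LEN];
 *
 *	mech.cm_type = crypto_mech2id(SUN_CKM_AES_CBC);
 *	mech.cm_param = (char *)iv;
 *	mech.cm_param_len = AES_BLOCK_LEN;
 *
 * For CTR, cm_param points to a CK_AES_CTR_PARAMS (ulCounterBits plus the
 * 16-byte initial counter block cb[]); for CCM, to a CK_AES_CCM_PARAMS
 * (ulMACSize, ulNonceSize, nonce, ulAuthDataSize, authData and ulDataSize).
 */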
293 
294 /* EXPORT DELETE START */
295 
296 /*
297  * Initialize key schedules for AES
298  */
299 static int
300 init_keysched(crypto_key_t *key, void *newbie)
301 {
302 	/*
303 	 * Only keys by value are supported by this module.
304 	 */
305 	switch (key->ck_format) {
306 	case CRYPTO_KEY_RAW:
307 		if (key->ck_length < AES_MINBITS ||
308 		    key->ck_length > AES_MAXBITS) {
309 			return (CRYPTO_KEY_SIZE_RANGE);
310 		}
311 
312 		/* key length must be either 128, 192, or 256 */
313 		if ((key->ck_length & 63) != 0)
314 			return (CRYPTO_KEY_SIZE_RANGE);
315 		break;
316 	default:
317 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
318 	}
319 
320 	aes_init_keysched(key->ck_data, key->ck_length, newbie);
321 	return (CRYPTO_SUCCESS);
322 }
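
/*
 * Worked example for the (ck_length & 63) check above: the valid AES key
 * lengths 128, 192 and 256 bits are all multiples of 64, so ANDing them
 * with 63 yields 0.  An in-range but invalid length such as 200 bits leaves
 * a non-zero remainder (200 & 63 == 8) and is rejected with
 * CRYPTO_KEY_SIZE_RANGE.
 */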
323 
324 /* EXPORT DELETE END */
325 
326 /*
327  * KCF software provider control entry points.
328  */
329 /* ARGSUSED */
330 static void
331 aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
332 {
333 	*status = CRYPTO_PROVIDER_READY;
334 }
335 
336 static int
337 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
338     crypto_key_t *key, crypto_spi_ctx_template_t template,
339     crypto_req_handle_t req) {
340 	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
341 }
342 
343 static int
344 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
345     crypto_key_t *key, crypto_spi_ctx_template_t template,
346     crypto_req_handle_t req) {
347 	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
348 }
349 
350 
351 
352 /*
353  * KCF software provider encrypt and decrypt entry points.
354  */
355 static int
356 aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
357     crypto_key_t *key, crypto_spi_ctx_template_t template,
358     crypto_req_handle_t req, boolean_t is_encrypt_init)
359 {
360 
361 /* EXPORT DELETE START */
362 
363 	aes_ctx_t *aes_ctx;
364 	int rv;
365 	int kmflag;
366 
367 	/*
368 	 * Only keys by value are supported by this module.
369 	 */
370 	if (key->ck_format != CRYPTO_KEY_RAW) {
371 		return (CRYPTO_KEY_TYPE_INCONSISTENT);
372 	}
373 
374 	if ((rv = aes_check_mech_param(mechanism)) != CRYPTO_SUCCESS)
375 		return (rv);
376 
377 	/*
378 	 * Allocate an AES context.
379 	 */
380 	kmflag = crypto_kmflag(req);
381 	if ((aes_ctx = kmem_zalloc(sizeof (aes_ctx_t), kmflag)) == NULL)
382 		return (CRYPTO_HOST_MEMORY);
383 
384 	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
385 	    is_encrypt_init);
386 	if (rv != CRYPTO_SUCCESS) {
387 		kmem_free(aes_ctx, sizeof (aes_ctx_t));
388 		return (rv);
389 	}
390 
391 	ctx->cc_provider_private = aes_ctx;
392 
393 /* EXPORT DELETE END */
394 
395 	return (CRYPTO_SUCCESS);
396 }
397 
398 /*
399  * Helper AES encrypt/decrypt update function for iov input data.
400  */
401 static int
402 aes_cipher_update_iov(aes_ctx_t *aes_ctx, crypto_data_t *input,
403     crypto_data_t *output, int (*cipher)(aes_ctx_t *, caddr_t, size_t,
404     crypto_data_t *))
405 {
406 	int rv;
407 /* EXPORT DELETE START */
408 
409 	if (input->cd_miscdata != NULL) {
410 		if (IS_P2ALIGNED(input->cd_miscdata, sizeof (uint64_t))) {
411 			/* LINTED: pointer alignment */
412 			aes_ctx->ac_iv[0] = *(uint64_t *)input->cd_miscdata;
413 			/* LINTED: pointer alignment */
414 			aes_ctx->ac_iv[1] = *(uint64_t *)&input->cd_miscdata[8];
415 		} else {
416 			uint8_t *miscdata8 = (uint8_t *)&input->cd_miscdata[0];
417 			uint8_t *iv8 = (uint8_t *)&aes_ctx->ac_iv[0];
418 
419 			AES_COPY_BLOCK(miscdata8, iv8);
420 		}
421 	}
422 
423 	if (input->cd_raw.iov_len < input->cd_length)
424 		return (CRYPTO_ARGUMENTS_BAD);
425 
426 	rv = (cipher)(aes_ctx, input->cd_raw.iov_base + input->cd_offset,
427 	    input->cd_length, (input == output) ? NULL : output);
428 
429 /* EXPORT DELETE END */
430 
431 	return (rv);
432 }
433 
434 /*
435  * Helper AES encrypt/decrypt update function for uio input data.
436  */
437 static int
438 aes_cipher_update_uio(aes_ctx_t *aes_ctx, crypto_data_t *input,
439     crypto_data_t *output, int (*cipher)(aes_ctx_t *, caddr_t, size_t,
440     crypto_data_t *))
441 {
442 /* EXPORT DELETE START */
443 	uio_t *uiop = input->cd_uio;
444 	off_t offset = input->cd_offset;
445 	size_t length = input->cd_length;
446 	uint_t vec_idx;
447 	size_t cur_len;
448 
449 	if (input->cd_miscdata != NULL) {
450 		if (IS_P2ALIGNED(input->cd_miscdata, sizeof (uint64_t))) {
451 			/* LINTED: pointer alignment */
452 			aes_ctx->ac_iv[0] = *(uint64_t *)input->cd_miscdata;
453 			/* LINTED: pointer alignment */
454 			aes_ctx->ac_iv[1] = *(uint64_t *)&input->cd_miscdata[8];
455 		} else {
456 			uint8_t *miscdata8 = (uint8_t *)&input->cd_miscdata[0];
457 			uint8_t *iv8 = (uint8_t *)&aes_ctx->ac_iv[0];
458 
459 			AES_COPY_BLOCK(miscdata8, iv8);
460 		}
461 	}
462 
463 	if (input->cd_uio->uio_segflg != UIO_SYSSPACE) {
464 		return (CRYPTO_ARGUMENTS_BAD);
465 	}
466 
467 	/*
468 	 * Jump to the first iovec containing data to be
469 	 * processed.
470 	 */
471 	for (vec_idx = 0; vec_idx < uiop->uio_iovcnt &&
472 	    offset >= uiop->uio_iov[vec_idx].iov_len;
473 	    offset -= uiop->uio_iov[vec_idx++].iov_len)
474 		;
475 	if (vec_idx == uiop->uio_iovcnt) {
476 		/*
477 		 * The caller specified an offset that is larger than the
478 		 * total size of the buffers it provided.
479 		 */
480 		return (CRYPTO_DATA_LEN_RANGE);
481 	}
482 
483 	/*
484 	 * Now process the iovecs.
485 	 */
486 	while (vec_idx < uiop->uio_iovcnt && length > 0) {
487 		cur_len = MIN(uiop->uio_iov[vec_idx].iov_len -
488 		    offset, length);
489 
490 		(cipher)(aes_ctx, uiop->uio_iov[vec_idx].iov_base + offset,
491 		    cur_len, (input == output) ? NULL : output);
492 
493 		length -= cur_len;
494 		vec_idx++;
495 		offset = 0;
496 	}
497 
498 	if (vec_idx == uiop->uio_iovcnt && length > 0) {
499 		/*
500 		 * The end of the specified iovecs was reached but
501 		 * the length requested could not be processed, i.e.,
502 		 * the caller requested to process more data than it provided.
503 		 */
504 
505 		return (CRYPTO_DATA_LEN_RANGE);
506 	}
507 
508 /* EXPORT DELETE END */
509 
510 	return (CRYPTO_SUCCESS);
511 }
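
/*
 * Worked example for the offset walk above (illustrative numbers): with
 * three iovecs of 16 bytes each and cd_offset == 20, the first iovec is
 * skipped (20 >= 16), leaving vec_idx == 1 and offset == 4, so processing
 * starts 4 bytes into the second iovec and continues across the remaining
 * iovecs until cd_length bytes have been handled.
 */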
512 
513 /*
514  * Helper AES encrypt/decrypt update function for mblk input data.
515  */
516 static int
517 aes_cipher_update_mp(aes_ctx_t *aes_ctx, crypto_data_t *input,
518     crypto_data_t *output, int (*cipher)(aes_ctx_t *, caddr_t, size_t,
519     crypto_data_t *))
520 {
521 /* EXPORT DELETE START */
522 	off_t offset = input->cd_offset;
523 	size_t length = input->cd_length;
524 	mblk_t *mp;
525 	size_t cur_len;
526 
527 	if (input->cd_miscdata != NULL) {
528 		if (IS_P2ALIGNED(input->cd_miscdata, sizeof (uint64_t))) {
529 			/* LINTED: pointer alignment */
530 			aes_ctx->ac_iv[0] = *(uint64_t *)input->cd_miscdata;
531 			/* LINTED: pointer alignment */
532 			aes_ctx->ac_iv[1] = *(uint64_t *)&input->cd_miscdata[8];
533 		} else {
534 			uint8_t *miscdata8 = (uint8_t *)&input->cd_miscdata[0];
535 			uint8_t *iv8 = (uint8_t *)&aes_ctx->ac_iv[0];
536 
537 			AES_COPY_BLOCK(miscdata8, iv8);
538 		}
539 	}
540 
541 	/*
542 	 * Jump to the first mblk_t containing data to be processed.
543 	 */
544 	for (mp = input->cd_mp; mp != NULL && offset >= MBLKL(mp);
545 	    offset -= MBLKL(mp), mp = mp->b_cont)
546 		;
547 	if (mp == NULL) {
548 		/*
549 		 * The caller specified an offset that is larger than the
550 		 * total size of the buffers it provided.
551 		 */
552 		return (CRYPTO_DATA_LEN_RANGE);
553 	}
554 
555 	/*
556 	 * Now do the processing on the mblk chain.
557 	 */
558 	while (mp != NULL && length > 0) {
559 		cur_len = MIN(MBLKL(mp) - offset, length);
560 		(cipher)(aes_ctx, (char *)(mp->b_rptr + offset), cur_len,
561 		    (input == output) ? NULL : output);
562 
563 		length -= cur_len;
564 		offset = 0;
565 		mp = mp->b_cont;
566 	}
567 
568 	if (mp == NULL && length > 0) {
569 		/*
570 		 * The end of the mblk was reached but the length requested
571 		 * could not be processed, i.e., the caller requested
572 		 * to process more data than it provided.
573 		 */
574 		return (CRYPTO_DATA_LEN_RANGE);
575 	}
576 
577 /* EXPORT DELETE END */
578 
579 	return (CRYPTO_SUCCESS);
580 }
581 
582 /* ARGSUSED */
583 static int
584 aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
585     crypto_data_t *ciphertext, crypto_req_handle_t req)
586 {
587 	int ret = CRYPTO_FAILED;
588 
589 /* EXPORT DELETE START */
590 
591 	aes_ctx_t *aes_ctx;
592 	size_t saved_length, saved_offset, length_needed;
593 
594 	ASSERT(ctx->cc_provider_private != NULL);
595 	aes_ctx = ctx->cc_provider_private;
596 
597 	/*
598 	 * For block ciphers, plaintext must be a multiple of AES block size.
599 	 * This test is only valid for ciphers whose blocksize is a power of 2.
600 	 * Even though AES CCM mode is a block cipher, it does not
601 	 * require the plaintext to be a multiple of AES block size.
602 	 * The length requirement for AES CCM mode has already been checked
603 	 * at init time
604 	 * at init time.
605 	if (((aes_ctx->ac_flags & AES_CTR_MODE) == 0) &&
606 	    ((aes_ctx->ac_flags & AES_CCM_MODE) == 0) &&
607 	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
608 		return (CRYPTO_DATA_LEN_RANGE);
609 
610 	AES_ARG_INPLACE(plaintext, ciphertext);
611 
612 	/*
613 	 * If the output buffer is too small, just return the length needed
614 	 * to store the output; do not destroy the context in that case.
615 	 */
616 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
617 		length_needed = plaintext->cd_length + aes_ctx->ac_ccm_mac_len;
618 	} else {
619 		length_needed = plaintext->cd_length;
620 	}
621 
622 	if (ciphertext->cd_length < length_needed) {
623 		ciphertext->cd_length = length_needed;
624 		return (CRYPTO_BUFFER_TOO_SMALL);
625 	}
626 
627 	saved_length = ciphertext->cd_length;
628 	saved_offset = ciphertext->cd_offset;
629 
630 	/*
631 	 * Do an update on the specified input data.
632 	 */
633 	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
634 	if (ret != CRYPTO_SUCCESS) {
635 		return (ret);
636 	}
637 
638 	/*
639 	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
640 	 * left-over unprocessed data, and compute the MAC
641 	 */
642 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
643 		/*
644 		 * aes_ccm_encrypt_final() will compute the MAC and append
645 		 * it to the existing ciphertext, so adjust the remaining
646 		 * length and offset accordingly.
647 		 */
648 
649 		/* the order of the following two lines MUST not be reversed */
650 		ciphertext->cd_offset = ciphertext->cd_length;
651 		ciphertext->cd_length = saved_length - ciphertext->cd_length;
652 		ret = aes_ccm_encrypt_final(aes_ctx, ciphertext);
653 		if (ret != CRYPTO_SUCCESS) {
654 			return (ret);
655 		}
656 
657 		if (plaintext != ciphertext) {
658 			ciphertext->cd_length =
659 			    ciphertext->cd_offset - saved_offset;
660 		}
661 		ciphertext->cd_offset = saved_offset;
662 	}
663 
664 	ASSERT(aes_ctx->ac_remainder_len == 0);
665 	(void) aes_free_context(ctx);
666 
667 /* EXPORT DELETE END */
668 
669 	/* LINTED */
670 	return (ret);
671 }
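
/*
 * Worked example of the CCM length accounting above (illustrative numbers):
 * with a 30-byte plaintext, a 16-byte MAC and a 46-byte output buffer,
 * length_needed is 46.  After the update produces the 30 ciphertext bytes,
 * the offset/length adjustment hands aes_ccm_encrypt_final() the remaining
 * 16 bytes of the output buffer in which to append the MAC.
 */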
672 
673 /* ARGSUSED */
674 static int
675 aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
676     crypto_data_t *plaintext, crypto_req_handle_t req)
677 {
678 	int ret = CRYPTO_FAILED;
679 
680 /* EXPORT DELETE START */
681 
682 	aes_ctx_t *aes_ctx;
683 	off_t saved_offset;
684 	size_t saved_length;
685 
686 	ASSERT(ctx->cc_provider_private != NULL);
687 	aes_ctx = ctx->cc_provider_private;
688 
689 	/*
690 	 * For block ciphers, the ciphertext must be a multiple of the AES
691 	 * block size.  This test is only valid for ciphers whose blocksize
692 	 * is a power of 2.  Even though AES CCM mode is a block cipher, it
693 	 * does not require the ciphertext to be a multiple of the AES block
694 	 * size.  The length requirement for AES CCM mode has already been
695 	 * checked at init time.
696 	 */
697 	if (((aes_ctx->ac_flags & AES_CTR_MODE) == 0) &&
698 	    ((aes_ctx->ac_flags & AES_CCM_MODE) == 0) &&
699 	    (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
700 		return (CRYPTO_DATA_LEN_RANGE);
701 
702 	AES_ARG_INPLACE(ciphertext, plaintext);
703 
704 	/*
705 	 * If the output buffer is too small, just return the length needed
706 	 * to store the output; do not destroy the context in that case.
707 	 *
708 	 * For AES CCM mode, the plaintext will be MAC-size bytes smaller
709 	 * than the ciphertext.
710 	 */
711 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
712 		if (plaintext->cd_length < aes_ctx->ac_ccm_data_len) {
713 			plaintext->cd_length = aes_ctx->ac_ccm_data_len;
714 			return (CRYPTO_BUFFER_TOO_SMALL);
715 		}
716 		saved_offset = plaintext->cd_offset;
717 		saved_length = plaintext->cd_length;
718 	} else if (plaintext->cd_length < ciphertext->cd_length) {
719 		plaintext->cd_length = ciphertext->cd_length;
720 		return (CRYPTO_BUFFER_TOO_SMALL);
721 	}
722 
723 	/*
724 	 * Do an update on the specified input data.
725 	 */
726 	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
727 	if (ret != CRYPTO_SUCCESS) {
728 		goto cleanup;
729 	}
730 
731 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
732 		ASSERT(aes_ctx->ac_ccm_processed_data_len
733 		    == aes_ctx->ac_ccm_data_len);
734 		ASSERT(aes_ctx->ac_ccm_processed_mac_len
735 		    == aes_ctx->ac_ccm_mac_len);
736 
737 		/* the order of the following two lines MUST not be reversed */
738 		plaintext->cd_offset = plaintext->cd_length;
739 		plaintext->cd_length = saved_length - plaintext->cd_length;
740 
741 		ret = aes_ccm_decrypt_final(aes_ctx, plaintext);
742 		if (ret == CRYPTO_SUCCESS) {
743 			if (plaintext != ciphertext) {
744 				plaintext->cd_length =
745 				    plaintext->cd_offset - saved_offset;
746 			}
747 		} else {
748 			plaintext->cd_length = saved_length;
749 		}
750 
751 		plaintext->cd_offset = saved_offset;
752 	}
753 
754 	ASSERT(aes_ctx->ac_remainder_len == 0);
755 
756 cleanup:
757 	if (aes_ctx->ac_ccm_pt_buf) {
758 		kmem_free(aes_ctx->ac_ccm_pt_buf, aes_ctx->ac_ccm_data_len);
759 	}
760 	(void) aes_free_context(ctx);
761 
762 /* EXPORT DELETE END */
763 
764 	/* LINTED */
765 	return (ret);
766 }
767 
768 /* ARGSUSED */
769 static int
770 aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
771     crypto_data_t *ciphertext, crypto_req_handle_t req)
772 {
773 	off_t saved_offset;
774 	size_t saved_length, out_len;
775 	int ret = CRYPTO_SUCCESS;
776 	aes_ctx_t *aes_ctx;
777 
778 	ASSERT(ctx->cc_provider_private != NULL);
779 
780 	AES_ARG_INPLACE(plaintext, ciphertext);
781 
782 	/* compute number of bytes that will hold the ciphertext */
783 	out_len = ((aes_ctx_t *)ctx->cc_provider_private)->ac_remainder_len;
784 	out_len += plaintext->cd_length;
785 	out_len &= ~(AES_BLOCK_LEN - 1);
786 
787 	/* return length needed to store the output */
788 	if (ciphertext->cd_length < out_len) {
789 		ciphertext->cd_length = out_len;
790 		return (CRYPTO_BUFFER_TOO_SMALL);
791 	}
792 
793 	saved_offset = ciphertext->cd_offset;
794 	saved_length = ciphertext->cd_length;
795 
796 
797 	/*
798 	 * Do the AES update on the specified input data.
799 	 */
800 	switch (plaintext->cd_format) {
801 	case CRYPTO_DATA_RAW:
802 		ret = aes_cipher_update_iov(ctx->cc_provider_private,
803 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
804 		break;
805 	case CRYPTO_DATA_UIO:
806 		ret = aes_cipher_update_uio(ctx->cc_provider_private,
807 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
808 		break;
809 	case CRYPTO_DATA_MBLK:
810 		ret = aes_cipher_update_mp(ctx->cc_provider_private,
811 		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
812 		break;
813 	default:
814 		ret = CRYPTO_ARGUMENTS_BAD;
815 	}
816 
817 	/*
818 	 * Since AES counter mode is a stream cipher, we call
819 	 * aes_counter_final() to pick up any remaining bytes.
820 	 * It is an internal function that does not destroy
821 	 * the context like *normal* final routines.
822 	 */
823 	aes_ctx = ctx->cc_provider_private;
824 	if ((aes_ctx->ac_flags & AES_CTR_MODE) &&
825 	    (aes_ctx->ac_remainder_len > 0)) {
826 		ret = aes_counter_final(aes_ctx, ciphertext);
827 	}
828 
829 	if (ret == CRYPTO_SUCCESS) {
830 		if (plaintext != ciphertext)
831 			ciphertext->cd_length =
832 			    ciphertext->cd_offset - saved_offset;
833 	} else {
834 		ciphertext->cd_length = saved_length;
835 	}
836 	ciphertext->cd_offset = saved_offset;
837 
838 	return (ret);
839 }
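
/*
 * Illustrative example of the remainder handling above: an update with 20
 * bytes of plaintext in CBC mode emits 16 bytes of ciphertext (out_len is
 * rounded down to a block multiple) and leaves 4 bytes buffered in
 * ac_remainder_len for a later update or final call; in CTR mode the
 * aes_counter_final() call above flushes those trailing 4 bytes, so all 20
 * bytes of output are produced.
 */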
840 
841 /* ARGSUSED */
842 static int
843 aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
844     crypto_data_t *plaintext, crypto_req_handle_t req)
845 {
846 	off_t saved_offset;
847 	size_t saved_length, out_len;
848 	int ret = CRYPTO_SUCCESS;
849 	aes_ctx_t *aes_ctx;
850 
851 	ASSERT(ctx->cc_provider_private != NULL);
852 
853 	AES_ARG_INPLACE(ciphertext, plaintext);
854 
855 	/* compute number of bytes that will hold the plaintext */
856 	out_len = ((aes_ctx_t *)ctx->cc_provider_private)->ac_remainder_len;
857 	out_len += ciphertext->cd_length;
858 	out_len &= ~(AES_BLOCK_LEN - 1);
859 
860 	/* return length needed to store the output */
861 	if (plaintext->cd_length < out_len) {
862 		plaintext->cd_length = out_len;
863 		return (CRYPTO_BUFFER_TOO_SMALL);
864 	}
865 
866 	saved_offset = plaintext->cd_offset;
867 	saved_length = plaintext->cd_length;
868 
869 	/*
870 	 * Do the AES update on the specified input data.
871 	 */
872 	switch (ciphertext->cd_format) {
873 	case CRYPTO_DATA_RAW:
874 		ret = aes_cipher_update_iov(ctx->cc_provider_private,
875 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
876 		break;
877 	case CRYPTO_DATA_UIO:
878 		ret = aes_cipher_update_uio(ctx->cc_provider_private,
879 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
880 		break;
881 	case CRYPTO_DATA_MBLK:
882 		ret = aes_cipher_update_mp(ctx->cc_provider_private,
883 		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
884 		break;
885 	default:
886 		ret = CRYPTO_ARGUMENTS_BAD;
887 	}
888 
889 	/*
890 	 * Since AES counter mode is a stream cipher, we call
891 	 * aes_counter_final() to pick up any remaining bytes.
892 	 * It is an internal function that does not destroy
893 	 * the context like *normal* final routines.
894 	 */
895 	aes_ctx = ctx->cc_provider_private;
896 	if ((aes_ctx->ac_flags & AES_CTR_MODE) &&
897 	    (aes_ctx->ac_remainder_len > 0)) {
898 		ret = aes_counter_final(aes_ctx, plaintext);
899 	}
900 
901 	if (ret == CRYPTO_SUCCESS) {
902 		if (ciphertext != plaintext)
903 			plaintext->cd_length =
904 			    plaintext->cd_offset - saved_offset;
905 	} else {
906 		plaintext->cd_length = saved_length;
907 	}
908 	plaintext->cd_offset = saved_offset;
909 
910 
911 	return (ret);
912 }
913 
914 /* ARGSUSED */
915 static int
916 aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
917     crypto_req_handle_t req)
918 {
919 
920 /* EXPORT DELETE START */
921 
922 	aes_ctx_t *aes_ctx;
923 	int ret;
924 
925 	ASSERT(ctx->cc_provider_private != NULL);
926 	aes_ctx = ctx->cc_provider_private;
927 
928 	if (data->cd_format != CRYPTO_DATA_RAW &&
929 	    data->cd_format != CRYPTO_DATA_UIO &&
930 	    data->cd_format != CRYPTO_DATA_MBLK) {
931 		return (CRYPTO_ARGUMENTS_BAD);
932 	}
933 
934 	if (aes_ctx->ac_flags & AES_CTR_MODE) {
935 		if (aes_ctx->ac_remainder_len > 0) {
936 			ret = aes_counter_final(aes_ctx, data);
937 			if (ret != CRYPTO_SUCCESS)
938 				return (ret);
939 		}
940 		data->cd_length = 0;
941 	} else if (aes_ctx->ac_flags & AES_CCM_MODE) {
942 		ret = aes_ccm_encrypt_final(aes_ctx, data);
943 		if (ret != CRYPTO_SUCCESS) {
944 			return (ret);
945 		}
946 	} else {
947 		/*
948 		 * There must be no unprocessed plaintext; plaintext is
949 		 * left unprocessed when the length of the last update
950 		 * is not a multiple of the AES block length.
951 		 */
952 		if (aes_ctx->ac_remainder_len > 0) {
953 			return (CRYPTO_DATA_LEN_RANGE);
954 		}
955 	}
956 
957 	(void) aes_free_context(ctx);
958 
959 /* EXPORT DELETE END */
960 
961 	return (CRYPTO_SUCCESS);
962 }
963 
964 /* ARGSUSED */
965 static int
966 aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
967     crypto_req_handle_t req)
968 {
969 
970 /* EXPORT DELETE START */
971 
972 	aes_ctx_t *aes_ctx;
973 	int ret;
974 	off_t saved_offset;
975 	size_t saved_length;
976 
977 	ASSERT(ctx->cc_provider_private != NULL);
978 	aes_ctx = ctx->cc_provider_private;
979 
980 	if (data->cd_format != CRYPTO_DATA_RAW &&
981 	    data->cd_format != CRYPTO_DATA_UIO &&
982 	    data->cd_format != CRYPTO_DATA_MBLK) {
983 		return (CRYPTO_ARGUMENTS_BAD);
984 	}
985 
986 	/*
987 	 * There must be no unprocessed ciphertext; ciphertext is left
988 	 * unprocessed when the length of the last update is not a
989 	 * multiple of the AES block length.
990 	 */
991 	if (aes_ctx->ac_remainder_len > 0) {
992 		if ((aes_ctx->ac_flags & AES_CTR_MODE) == 0)
993 			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
994 		else {
995 			ret = aes_counter_final(aes_ctx, data);
996 			if (ret != CRYPTO_SUCCESS)
997 				return (ret);
998 		}
999 	}
1000 
1001 	if (aes_ctx->ac_flags & AES_CCM_MODE) {
1002 		/*
1003 		 * This is where all the plaintext is returned; make sure
1004 		 * the plaintext buffer is big enough.
1005 		 */
1006 		size_t pt_len = aes_ctx->ac_ccm_data_len;
1007 		if (data->cd_length < pt_len) {
1008 			data->cd_length = pt_len;
1009 			return (CRYPTO_BUFFER_TOO_SMALL);
1010 		}
1011 
1012 		ASSERT(aes_ctx->ac_ccm_processed_data_len == pt_len);
1013 		ASSERT(aes_ctx->ac_ccm_processed_mac_len
1014 		    == aes_ctx->ac_ccm_mac_len);
1015 		saved_offset = data->cd_offset;
1016 		saved_length = data->cd_length;
1017 		ret = aes_ccm_decrypt_final(aes_ctx, data);
1018 		if (ret == CRYPTO_SUCCESS) {
1019 			data->cd_length = data->cd_offset - saved_offset;
1020 		} else {
1021 			data->cd_length = saved_length;
1022 		}
1023 
1024 		data->cd_offset = saved_offset;
1025 		if (ret != CRYPTO_SUCCESS) {
1026 			return (ret);
1027 		}
1028 	}
1029 
1030 
1031 	if ((aes_ctx->ac_flags & AES_CTR_MODE) == 0)
1032 		data->cd_length = 0;
1033 
1034 	if (aes_ctx->ac_ccm_pt_buf != NULL) {
1035 		kmem_free(aes_ctx->ac_ccm_pt_buf, aes_ctx->ac_ccm_data_len);
1036 	}
1037 
1038 	(void) aes_free_context(ctx);
1039 
1040 /* EXPORT DELETE END */
1041 
1042 	return (CRYPTO_SUCCESS);
1043 }
1044 
1045 /* ARGSUSED */
1046 static int
1047 aes_encrypt_atomic(crypto_provider_handle_t provider,
1048     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1049     crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1050     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1051 {
1052 	aes_ctx_t aes_ctx;	/* on the stack */
1053 	off_t saved_offset;
1054 	size_t saved_length;
1055 	int ret;
1056 
1057 	AES_ARG_INPLACE(plaintext, ciphertext);
1058 
1059 	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
1060 	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
1061 		/*
1062 		 * Plaintext must be a multiple of AES block size.
1063 		 * This test only works for non-padded mechanisms
1064 		 * when blocksize is 2^N.
1065 		 */
1066 		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1067 			return (CRYPTO_DATA_LEN_RANGE);
1068 	}
1069 
1070 	/* return length needed to store the output */
1071 	if (ciphertext->cd_length < plaintext->cd_length) {
1072 		ciphertext->cd_length = plaintext->cd_length;
1073 		return (CRYPTO_BUFFER_TOO_SMALL);
1074 	}
1075 
1076 	if ((ret = aes_check_mech_param(mechanism)) != CRYPTO_SUCCESS)
1077 		return (ret);
1078 
1079 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1080 
1081 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1082 	    crypto_kmflag(req), B_TRUE);
1083 	if (ret != CRYPTO_SUCCESS)
1084 		return (ret);
1085 
1086 	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1087 		size_t length_needed
1088 		    = plaintext->cd_length + aes_ctx.ac_ccm_mac_len;
1089 		if (ciphertext->cd_length < length_needed) {
1090 			ciphertext->cd_length = length_needed;
1091 			/* use out: so the allocated key schedule is freed */
			ret = CRYPTO_BUFFER_TOO_SMALL;
			goto out;
1092 		}
1093 	}
1094 
1095 
1096 	saved_offset = ciphertext->cd_offset;
1097 	saved_length = ciphertext->cd_length;
1098 
1099 	/*
1100 	 * Do an update on the specified input data.
1101 	 */
1102 	switch (plaintext->cd_format) {
1103 	case CRYPTO_DATA_RAW:
1104 		ret = aes_cipher_update_iov(&aes_ctx, plaintext, ciphertext,
1105 		    aes_encrypt_contiguous_blocks);
1106 		break;
1107 	case CRYPTO_DATA_UIO:
1108 		ret = aes_cipher_update_uio(&aes_ctx, plaintext, ciphertext,
1109 		    aes_encrypt_contiguous_blocks);
1110 		break;
1111 	case CRYPTO_DATA_MBLK:
1112 		ret = aes_cipher_update_mp(&aes_ctx, plaintext, ciphertext,
1113 		    aes_encrypt_contiguous_blocks);
1114 		break;
1115 	default:
1116 		ret = CRYPTO_ARGUMENTS_BAD;
1117 	}
1118 
1119 	if (ret == CRYPTO_SUCCESS) {
1120 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1121 			ret = aes_ccm_encrypt_final(&aes_ctx, ciphertext);
1122 			if (ret != CRYPTO_SUCCESS)
1123 				goto out;
1124 			ASSERT(aes_ctx.ac_remainder_len == 0);
1125 		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1126 			if (aes_ctx.ac_remainder_len > 0) {
1127 				ret = aes_counter_final(&aes_ctx, ciphertext);
1128 				if (ret != CRYPTO_SUCCESS)
1129 					goto out;
1130 			}
1131 		} else {
1132 			ASSERT(aes_ctx.ac_remainder_len == 0);
1133 		}
1134 
1135 		if (plaintext != ciphertext) {
1136 			ciphertext->cd_length =
1137 			    ciphertext->cd_offset - saved_offset;
1138 		}
1139 	} else {
1140 		ciphertext->cd_length = saved_length;
1141 	}
1142 	ciphertext->cd_offset = saved_offset;
1143 
1144 out:
1145 	if (aes_ctx.ac_flags & AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1146 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1147 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1148 	}
1149 
1150 	return (ret);
1151 }
1152 
1153 /* ARGSUSED */
1154 static int
1155 aes_decrypt_atomic(crypto_provider_handle_t provider,
1156     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1157     crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1158     crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1159 {
1160 	aes_ctx_t aes_ctx;	/* on the stack */
1161 	off_t saved_offset;
1162 	size_t saved_length;
1163 	int ret;
1164 
1165 	AES_ARG_INPLACE(ciphertext, plaintext);
1166 
1167 	/*
1168 	 * For block ciphers, ciphertext must be a multiple of AES block size.
1169 	 * This test is only valid for non-padded mechanisms
1170 	 * when blocksize is 2^N
1171 	 * Even though AES CCM mode is a block cipher, it does not
1172 	 * require the plaintext to be a multiple of AES block size.
1173 	 * The length requirement for AES CCM mode will be checked
1174 	 * at init time.
1175 	 */
1176 	if ((mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) &&
1177 	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE) &&
1178 	    ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0))
1179 		return (CRYPTO_DATA_LEN_RANGE);
1180 
1181 	/*
1182 	 * Return the length needed to store the output; the length
1183 	 * requirement for AES CCM mode cannot be determined until later.
1184 	 */
1185 	if ((plaintext->cd_length < ciphertext->cd_length) &&
1186 	    (mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)) {
1187 		plaintext->cd_length = ciphertext->cd_length;
1188 		return (CRYPTO_BUFFER_TOO_SMALL);
1189 	}
1190 
1191 
1192 	if ((ret = aes_check_mech_param(mechanism)) != CRYPTO_SUCCESS)
1193 		return (ret);
1194 
1195 	bzero(&aes_ctx, sizeof (aes_ctx_t));
1196 
1197 	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1198 	    crypto_kmflag(req), B_FALSE);
1199 	if (ret != CRYPTO_SUCCESS)
1200 		return (ret);
1201 
1202 	/* check length requirement for AES CCM mode now */
1203 	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1204 		if (plaintext->cd_length < aes_ctx.ac_ccm_data_len) {
1205 			plaintext->cd_length = aes_ctx.ac_ccm_data_len;
1206 			ret = CRYPTO_BUFFER_TOO_SMALL;
1207 			goto out;
1208 		}
1209 	}
1210 
1211 	saved_offset = plaintext->cd_offset;
1212 	saved_length = plaintext->cd_length;
1213 
1214 	/*
1215 	 * Do an update on the specified input data.
1216 	 */
1217 	switch (ciphertext->cd_format) {
1218 	case CRYPTO_DATA_RAW:
1219 		ret = aes_cipher_update_iov(&aes_ctx, ciphertext, plaintext,
1220 		    aes_decrypt_contiguous_blocks);
1221 		break;
1222 	case CRYPTO_DATA_UIO:
1223 		ret = aes_cipher_update_uio(&aes_ctx, ciphertext, plaintext,
1224 		    aes_decrypt_contiguous_blocks);
1225 		break;
1226 	case CRYPTO_DATA_MBLK:
1227 		ret = aes_cipher_update_mp(&aes_ctx, ciphertext, plaintext,
1228 		    aes_decrypt_contiguous_blocks);
1229 		break;
1230 	default:
1231 		ret = CRYPTO_ARGUMENTS_BAD;
1232 	}
1233 
1234 	if (ret == CRYPTO_SUCCESS) {
1235 		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1236 			ASSERT(aes_ctx.ac_ccm_processed_data_len
1237 			    == aes_ctx.ac_ccm_data_len);
1238 			ASSERT(aes_ctx.ac_ccm_processed_mac_len
1239 			    == aes_ctx.ac_ccm_mac_len);
1240 			ret = aes_ccm_decrypt_final(&aes_ctx, plaintext);
1241 			ASSERT(aes_ctx.ac_remainder_len == 0);
1242 			if ((ret == CRYPTO_SUCCESS) &&
1243 			    (ciphertext != plaintext)) {
1244 				plaintext->cd_length =
1245 				    plaintext->cd_offset - saved_offset;
1246 			} else {
1247 				plaintext->cd_length = saved_length;
1248 			}
1249 		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1250 			ASSERT(aes_ctx.ac_remainder_len == 0);
1251 			if (ciphertext != plaintext)
1252 				plaintext->cd_length =
1253 				    plaintext->cd_offset - saved_offset;
1254 		} else {
1255 			if (aes_ctx.ac_remainder_len > 0) {
1256 				ret = aes_counter_final(&aes_ctx, plaintext);
1257 				if (ret != CRYPTO_SUCCESS)
1258 					goto out;
1259 			}
1260 			if (ciphertext != plaintext)
1261 				plaintext->cd_length =
1262 				    plaintext->cd_offset - saved_offset;
1263 		}
1264 	} else {
1265 		plaintext->cd_length = saved_length;
1266 	}
1267 	plaintext->cd_offset = saved_offset;
1268 
1269 out:
1270 	if (aes_ctx.ac_flags & AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1271 		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1272 		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1273 	}
1274 
1275 	if (aes_ctx.ac_ccm_pt_buf != NULL) {
1276 		kmem_free(aes_ctx.ac_ccm_pt_buf, aes_ctx.ac_ccm_data_len);
1277 	}
1278 
1279 	return (ret);
1280 }
1281 
1282 /*
1283  * KCF software provider context template entry points.
1284  */
1285 /* ARGSUSED */
1286 static int
1287 aes_create_ctx_template(crypto_provider_handle_t provider,
1288     crypto_mechanism_t *mechanism, crypto_key_t *key,
1289     crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1290 {
1291 
1292 /* EXPORT DELETE START */
1293 
1294 	void *keysched;
1295 	size_t size;
1296 	int rv;
1297 
1298 	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1299 	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1300 	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1301 	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE)
1302 		return (CRYPTO_MECHANISM_INVALID);
1303 
1304 	if ((keysched = aes_alloc_keysched(&size,
1305 	    crypto_kmflag(req))) == NULL) {
1306 		return (CRYPTO_HOST_MEMORY);
1307 	}
1308 
1309 	/*
1310 	 * Initialize key schedule.  Key length information is stored
1311 	 * in the key.
1312 	 */
1313 	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1314 		bzero(keysched, size);
1315 		kmem_free(keysched, size);
1316 		return (rv);
1317 	}
1318 
1319 	*tmpl = keysched;
1320 	*tmpl_size = size;
1321 
1322 /* EXPORT DELETE END */
1323 
1324 	return (CRYPTO_SUCCESS);
1325 }
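
/*
 * Illustrative sketch (hypothetical consumer code, not compiled): a caller
 * that processes many buffers under one key can build the key schedule once
 * and reuse it, which lets aes_common_init_ctx() below skip init_keysched()
 * on every call.  See crypto_create_ctx_template(9F) and crypto_encrypt(9F);
 * mech, key, pt and ct are assumed to be set up as sketched earlier.
 *
 *	crypto_ctx_template_t tmpl;
 *
 *	if (crypto_create_ctx_template(&mech, &key, &tmpl, KM_SLEEP) ==
 *	    CRYPTO_SUCCESS)
 *		(void) crypto_encrypt(&mech, &pt, &key, tmpl, &ct, NULL);
 */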
1326 
1327 /* ARGSUSED */
1328 static int
1329 aes_free_context(crypto_ctx_t *ctx)
1330 {
1331 
1332 /* EXPORT DELETE START */
1333 
1334 	aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1335 
1336 	if (aes_ctx != NULL) {
1337 		if (aes_ctx->ac_flags & AES_PROVIDER_OWNS_KEY_SCHEDULE) {
1338 			ASSERT(aes_ctx->ac_keysched_len != 0);
1339 			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1340 			kmem_free(aes_ctx->ac_keysched,
1341 			    aes_ctx->ac_keysched_len);
1342 		}
1343 		kmem_free(aes_ctx, sizeof (aes_ctx_t));
1344 		ctx->cc_provider_private = NULL;
1345 	}
1346 
1347 /* EXPORT DELETE END */
1348 
1349 	return (CRYPTO_SUCCESS);
1350 }
1351 
1352 /* ARGSUSED */
1353 static int
1354 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1355     crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1356     boolean_t is_encrypt_init)
1357 {
1358 	int rv = CRYPTO_SUCCESS;
1359 
1360 /* EXPORT DELETE START */
1361 
1362 	void *keysched;
1363 	size_t size;
1364 	CK_AES_CCM_PARAMS *ccm_param = NULL;
1365 
1366 	aes_ctx->ac_flags = 0;
1367 
1368 	if (mechanism->cm_type == AES_CBC_MECH_INFO_TYPE) {
1369 		/*
1370 		 * Copy 128-bit IV into context.
1371 		 *
1372 		 * If cm_param == NULL then the IV comes from the
1373 		 * cd_miscdata field in the crypto_data structure.
1374 		 */
1375 		if (mechanism->cm_param != NULL) {
1376 			ASSERT(mechanism->cm_param_len == AES_BLOCK_LEN);
1377 			if (IS_P2ALIGNED(mechanism->cm_param,
1378 			    sizeof (uint64_t))) {
1379 				uint64_t *param64;
1380 				param64 = (uint64_t *)mechanism->cm_param;
1381 
1382 				aes_ctx->ac_iv[0] = *param64++;
1383 				aes_ctx->ac_iv[1] = *param64;
1384 			} else {
1385 				uint8_t *iv8;
1386 				uint8_t *p8;
1387 				iv8 = (uint8_t *)&aes_ctx->ac_iv;
1388 				p8 = (uint8_t *)&mechanism->cm_param[0];
1389 
1390 				iv8[0] = p8[0];
1391 				iv8[1] = p8[1];
1392 				iv8[2] = p8[2];
1393 				iv8[3] = p8[3];
1394 				iv8[4] = p8[4];
1395 				iv8[5] = p8[5];
1396 				iv8[6] = p8[6];
1397 				iv8[7] = p8[7];
1398 				iv8[8] = p8[8];
1399 				iv8[9] = p8[9];
1400 				iv8[10] = p8[10];
1401 				iv8[11] = p8[11];
1402 				iv8[12] = p8[12];
1403 				iv8[13] = p8[13];
1404 				iv8[14] = p8[14];
1405 				iv8[15] = p8[15];
1406 			}
1407 		}
1408 
1409 		aes_ctx->ac_lastp = (uint8_t *)&aes_ctx->ac_iv[0];
1410 		aes_ctx->ac_flags |= AES_CBC_MODE;
1411 
1412 	} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1413 		if (mechanism->cm_param != NULL) {
1414 			CK_AES_CTR_PARAMS *pp;
1415 			uint64_t mask = 0;
1416 			ulong_t count;
1417 			uint8_t *iv8;
1418 			uint8_t *p8;
1419 
1420 			/* XXX what to do about miscdata */
1421 			pp = (CK_AES_CTR_PARAMS *)mechanism->cm_param;
1422 			count = pp->ulCounterBits;
1423 			if (count == 0 || count > 64) {
1424 				return (CRYPTO_MECHANISM_PARAM_INVALID);
1425 			}
1426 			while (count-- > 0)
1427 				mask |= (1ULL << count);
1428 #ifdef _LITTLE_ENDIAN
1429 			p8 = (uint8_t *)&mask;
1430 			mask = (((uint64_t)p8[0] << 56) |
1431 			    ((uint64_t)p8[1] << 48) |
1432 			    ((uint64_t)p8[2] << 40) |
1433 			    ((uint64_t)p8[3] << 32) |
1434 			    ((uint64_t)p8[4] << 24) |
1435 			    ((uint64_t)p8[5] << 16) |
1436 			    ((uint64_t)p8[6] << 8) |
1437 			    (uint64_t)p8[7]);
1438 #endif
1439 			aes_ctx->ac_counter_mask = mask;
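			/*
			 * Worked example (illustrative): with ulCounterBits
			 * == 32, the loop above sets mask to
			 * 0x00000000ffffffff.  The counter block is stored
			 * big-endian in ac_iv, so on a little-endian machine
			 * the byte swap turns the mask into
			 * 0xffffffff00000000, selecting the register bits
			 * that correspond to the last four bytes of the
			 * counter block, i.e. the 32 counter bits.
			 */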
1440 
1441 			iv8 = (uint8_t *)&aes_ctx->ac_iv;
1442 			p8 = (uint8_t *)&pp->cb[0];
1443 
1444 			iv8[0] = p8[0];
1445 			iv8[1] = p8[1];
1446 			iv8[2] = p8[2];
1447 			iv8[3] = p8[3];
1448 			iv8[4] = p8[4];
1449 			iv8[5] = p8[5];
1450 			iv8[6] = p8[6];
1451 			iv8[7] = p8[7];
1452 			iv8[8] = p8[8];
1453 			iv8[9] = p8[9];
1454 			iv8[10] = p8[10];
1455 			iv8[11] = p8[11];
1456 			iv8[12] = p8[12];
1457 			iv8[13] = p8[13];
1458 			iv8[14] = p8[14];
1459 			iv8[15] = p8[15];
1460 		} else {
1461 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1462 		}
1463 
1464 		aes_ctx->ac_lastp = (uint8_t *)&aes_ctx->ac_iv[0];
1465 		aes_ctx->ac_flags |= AES_CTR_MODE;
1466 	} else if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1467 		if (mechanism->cm_param != NULL) {
1468 			int rc;
1469 
1470 			ccm_param = (CK_AES_CCM_PARAMS *)mechanism->cm_param;
1471 
1472 			if ((rc = aes_ccm_validate_args(ccm_param,
1473 			    is_encrypt_init)) != 0) {
1474 				return (rc);
1475 			}
1476 
1477 			aes_ctx->ac_ccm_mac_len = ccm_param->ulMACSize;
1478 			if (is_encrypt_init) {
1479 				aes_ctx->ac_ccm_data_len
1480 				    = ccm_param->ulDataSize;
1481 			} else {
1482 				aes_ctx->ac_ccm_data_len =
1483 				    ccm_param->ulDataSize
1484 				    - aes_ctx->ac_ccm_mac_len;
1485 				aes_ctx->ac_ccm_processed_mac_len = 0;
1486 			}
1487 			aes_ctx->ac_ccm_processed_data_len = 0;
1488 
1489 			aes_ctx->ac_flags |= AES_CCM_MODE;
1490 		} else {
1491 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1492 		}
1493 	} else {
1494 		aes_ctx->ac_flags |= AES_ECB_MODE;
1495 	}
1496 
1497 	if (template == NULL) {
1498 		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1499 			return (CRYPTO_HOST_MEMORY);
1500 		/*
1501 		 * Initialize key schedule.
1502 		 * Key length is stored in the key.
1503 		 */
1504 		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1505 			kmem_free(keysched, size);
1506 			return (rv);
1507 		}
1508 
1509 		aes_ctx->ac_flags |= AES_PROVIDER_OWNS_KEY_SCHEDULE;
1510 		aes_ctx->ac_keysched_len = size;
1511 	} else {
1512 		keysched = template;
1513 	}
1514 	aes_ctx->ac_keysched = keysched;
1515 
1516 	/* process the nonce and associated data if it is AES CCM mode */
1517 	if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1518 		if (aes_ccm_init(aes_ctx, ccm_param->nonce,
1519 		    ccm_param->ulNonceSize, ccm_param->authData,
1520 		    ccm_param->ulAuthDataSize) != 0) {
1521 			bzero(keysched, size);
1522 			kmem_free(keysched, size);
1523 			return (CRYPTO_MECHANISM_PARAM_INVALID);
1524 		}
1525 		if (!is_encrypt_init) {
1526 			/* allocate buffer for storing decrypted plaintext */
1527 			aes_ctx->ac_ccm_pt_buf =
1528 			    kmem_alloc(aes_ctx->ac_ccm_data_len, kmflag);
1529 			if (aes_ctx->ac_ccm_pt_buf == NULL) {
1530 				bzero(keysched, size);
1531 				kmem_free(keysched, size);
1532 				return (CRYPTO_HOST_MEMORY);
1533 			}
1534 		}
1535 	}
1536 
1537 /* EXPORT DELETE END */
1538 
1539 	return (rv);
1540 }
1541