1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #pragma ident	"%Z%%M%	%I%	%E% SMI"
28 
29 #include <sys/modctl.h>
30 #include <sys/cmn_err.h>
31 #include <sys/crypto/common.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/strsun.h>
34 #include <sys/systm.h>
35 #include <sys/sysmacros.h>
36 #define	_SHA2_IMPL
37 #include <sys/sha2.h>
38 
39 /*
40  * The sha2 module is created with two modlinkages:
41  * - a modlmisc that allows consumers to directly call the entry points
42  *   SHA2Init, SHA2Update, and SHA2Final.
43  * - a modlcrypto that allows the module to register with the Kernel
44  *   Cryptographic Framework (KCF) as a software provider for the SHA2
45  *   mechanisms.
46  */
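
/*
 * A minimal sketch of direct use of the misc/sha2 entry points by a
 * kernel consumer (buf and len are placeholders; a SHA-256 digest is
 * assumed here):
 *
 *	SHA2_CTX ctx;
 *	uint8_t md[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);
 *	SHA2Update(&ctx, buf, len);
 *	SHA2Final(md, &ctx);
 */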
47 
48 static struct modlmisc modlmisc = {
49 	&mod_miscops,
50 	"SHA2 Message-Digest Algorithm"
51 };
52 
53 static struct modlcrypto modlcrypto = {
54 	&mod_cryptoops,
55 	"SHA2 Kernel SW Provider %I%"
56 };
57 
58 static struct modlinkage modlinkage = {
59 	MODREV_1, &modlmisc, &modlcrypto, NULL
60 };
61 
62 /*
63  * CSPI information (entry points, provider info, etc.)
64  */
65 
66 /*
67  * Context for SHA2 mechanism.
68  */
69 typedef struct sha2_ctx {
70 	sha2_mech_type_t	sc_mech_type;	/* type of context */
71 	SHA2_CTX		sc_sha2_ctx;	/* SHA2 context */
72 } sha2_ctx_t;
73 
74 /*
75  * Context for SHA2 HMAC and HMAC GENERAL mechanisms.
76  */
77 typedef struct sha2_hmac_ctx {
78 	sha2_mech_type_t	hc_mech_type;	/* type of context */
79 	uint32_t		hc_digest_len;	/* digest len in bytes */
80 	SHA2_CTX		hc_icontext;	/* inner SHA2 context */
81 	SHA2_CTX		hc_ocontext;	/* outer SHA2 context */
82 } sha2_hmac_ctx_t;
83 
84 /*
85  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
86  * by KCF to one of the entry points.
87  */
88 
89 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
90 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
91 
92 /* to extract the digest length passed as mechanism parameter */
93 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
94 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
95 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
96 	else {								\
97 		ulong_t tmp_ulong;					\
98 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
99 		(len) = (uint32_t)tmp_ulong;				\
100 	}								\
101 }
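
/*
 * The *_HMAC_GENERAL mechanisms pass the requested digest length as a
 * single ulong_t in cm_param.  The parameter buffer supplied by the
 * consumer is not guaranteed to be suitably aligned, so the macro
 * above falls back to bcopy()ing it into an aligned temporary before
 * narrowing the value to 32 bits.
 */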
102 
103 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
104 	SHA2Init(mech, ctx);				\
105 	SHA2Update(ctx, key, len);			\
106 	SHA2Final(digest, ctx);				\
107 }
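
/*
 * Helper used when an HMAC key is longer than the hash block size:
 * per the HMAC construction, the key is first hashed down to a digest
 * (using a borrowed, not-yet-initialized SHA2 context) and that digest
 * is then used as the effective key.
 */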
108 
109 /*
110  * Mechanism info structure passed to KCF during registration.
111  */
112 static crypto_mech_info_t sha2_mech_info_tab[] = {
113 	/* SHA256 */
114 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
115 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
116 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
117 	/* SHA256-HMAC */
118 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
119 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
120 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
121 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 	/* SHA256-HMAC GENERAL */
123 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
124 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
127 	/* SHA384 */
128 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
129 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
130 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
131 	/* SHA384-HMAC */
132 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
133 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
134 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
135 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
136 	/* SHA384-HMAC GENERAL */
137 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
138 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
139 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
140 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
141 	/* SHA512 */
142 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
143 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
144 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
145 	/* SHA512-HMAC */
146 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
147 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
148 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
149 	    CRYPTO_KEYSIZE_UNIT_IN_BITS},
150 	/* SHA512-HMAC GENERAL */
151 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
152 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
153 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
154 	    CRYPTO_KEYSIZE_UNIT_IN_BITS}
155 };
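
/*
 * Note: the entries above must remain in the same order as the
 * sha2_mech_type_t values they name.  Several places in this file
 * (e.g. the cm_type % 3 and cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE
 * tests) rely on that ordering to tell digest, HMAC, and HMAC GENERAL
 * mechanisms apart.
 */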
156 
157 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
158 
159 static crypto_control_ops_t sha2_control_ops = {
160 	sha2_provider_status
161 };
162 
163 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
164     crypto_req_handle_t);
165 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
166     crypto_req_handle_t);
167 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
168     crypto_req_handle_t);
169 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
170     crypto_req_handle_t);
171 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
172     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
173     crypto_req_handle_t);
174 
175 static crypto_digest_ops_t sha2_digest_ops = {
176 	sha2_digest_init,
177 	sha2_digest,
178 	sha2_digest_update,
179 	NULL,
180 	sha2_digest_final,
181 	sha2_digest_atomic
182 };
183 
184 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
185     crypto_spi_ctx_template_t, crypto_req_handle_t);
186 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
187     crypto_req_handle_t);
188 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
189 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
190     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
191     crypto_spi_ctx_template_t, crypto_req_handle_t);
192 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
193     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
194     crypto_spi_ctx_template_t, crypto_req_handle_t);
195 
196 static crypto_mac_ops_t sha2_mac_ops = {
197 	sha2_mac_init,
198 	NULL,
199 	sha2_mac_update,
200 	sha2_mac_final,
201 	sha2_mac_atomic,
202 	sha2_mac_verify_atomic
203 };
204 
205 static int sha2_create_ctx_template(crypto_provider_handle_t,
206     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
207     size_t *, crypto_req_handle_t);
208 static int sha2_free_context(crypto_ctx_t *);
209 
210 static crypto_ctx_ops_t sha2_ctx_ops = {
211 	sha2_create_ctx_template,
212 	sha2_free_context
213 };
214 
215 static crypto_ops_t sha2_crypto_ops = {
216 	&sha2_control_ops,
217 	&sha2_digest_ops,
218 	NULL,
219 	&sha2_mac_ops,
220 	NULL,
221 	NULL,
222 	NULL,
223 	NULL,
224 	NULL,
225 	NULL,
226 	NULL,
227 	NULL,
228 	NULL,
229 	&sha2_ctx_ops
230 };
231 
232 static crypto_provider_info_t sha2_prov_info = {
233 	CRYPTO_SPI_VERSION_1,
234 	"SHA2 Software Provider",
235 	CRYPTO_SW_PROVIDER,
236 	{&modlinkage},
237 	NULL,
238 	&sha2_crypto_ops,
239 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
240 	sha2_mech_info_tab
241 };
242 
243 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
244 
245 int
246 _init()
247 {
248 	int ret;
249 
250 	if ((ret = mod_install(&modlinkage)) != 0)
251 		return (ret);
252 
253 	/*
254 	 * Register with KCF. If the registration fails, log an
255 	 * error but do not uninstall the module, since the functionality
256 	 * provided by misc/sha2 should still be available.
257 	 */
258 	if ((ret = crypto_register_provider(&sha2_prov_info,
259 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
260 		cmn_err(CE_WARN, "sha2 _init: "
261 		    "crypto_register_provider() failed (0x%x)", ret);
262 
263 	return (0);
264 }
265 
266 int
267 _info(struct modinfo *modinfop)
268 {
269 	return (mod_info(&modlinkage, modinfop));
270 }
271 
272 /*
273  * KCF software provider control entry points.
274  */
275 /* ARGSUSED */
276 static void
277 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
278 {
279 	*status = CRYPTO_PROVIDER_READY;
280 }
281 
282 /*
283  * KCF software provider digest entry points.
284  */
285 
286 static int
287 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
288     crypto_req_handle_t req)
289 {
290 
291 	/*
292 	 * Allocate and initialize SHA2 context.
293 	 */
294 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
295 	    crypto_kmflag(req));
296 	if (ctx->cc_provider_private == NULL)
297 		return (CRYPTO_HOST_MEMORY);
298 
299 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
300 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
301 
302 	return (CRYPTO_SUCCESS);
303 }
304 
305 /*
306  * Helper SHA2 digest update function for uio data.
307  */
308 static int
309 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
310 {
311 	off_t offset = data->cd_offset;
312 	size_t length = data->cd_length;
313 	uint_t vec_idx;
314 	size_t cur_len;
315 
316 	/* we support only kernel buffer */
317 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
318 		return (CRYPTO_ARGUMENTS_BAD);
319 
320 	/*
321 	 * Jump to the first iovec containing data to be
322 	 * digested.
323 	 */
324 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
325 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
326 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len);
327 	if (vec_idx == data->cd_uio->uio_iovcnt) {
328 		/*
329 		 * The caller specified an offset that is larger than the
330 		 * total size of the buffers it provided.
331 		 */
332 		return (CRYPTO_DATA_LEN_RANGE);
333 	}
334 
335 	/*
336 	 * Now do the digesting on the iovecs.
337 	 */
338 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
339 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
340 		    offset, length);
341 
342 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
343 		    uio_iov[vec_idx].iov_base + offset, cur_len);
344 		length -= cur_len;
345 		vec_idx++;
346 		offset = 0;
347 	}
348 
349 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
350 		/*
		 * The end of the specified iovecs was reached but the
		 * requested length could not be processed; i.e., the
		 * caller requested to digest more data than it provided.
354 		 */
355 		return (CRYPTO_DATA_LEN_RANGE);
356 	}
357 
358 	return (CRYPTO_SUCCESS);
359 }
360 
361 /*
362  * Helper SHA2 digest final function for uio data.
363  * digest_len is the length of the desired digest. If digest_len
364  * is smaller than the default SHA2 digest length, the caller
365  * must pass a scratch buffer, digest_scratch, which must
366  * be at least the algorithm's digest length bytes.
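 *
 * For example, a SHA256-HMAC GENERAL request for a 16-byte MAC is
 * finalized into digest_scratch (SHA256_DIGEST_LENGTH bytes) and only
 * the first 16 bytes are copied out to the caller's iovecs.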
367  */
368 static int
369 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
370     ulong_t digest_len, uchar_t *digest_scratch)
371 {
372 	off_t offset = digest->cd_offset;
373 	uint_t vec_idx;
374 
375 	/* we support only kernel buffer */
376 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
377 		return (CRYPTO_ARGUMENTS_BAD);
378 
379 	/*
380 	 * Jump to the first iovec containing ptr to the digest to
381 	 * be returned.
382 	 */
	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
385 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len);
386 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
387 		/*
388 		 * The caller specified an offset that is
389 		 * larger than the total size of the buffers
390 		 * it provided.
391 		 */
392 		return (CRYPTO_DATA_LEN_RANGE);
393 	}
394 
395 	if (offset + digest_len <=
396 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
397 		/*
398 		 * The computed SHA2 digest will fit in the current
399 		 * iovec.
400 		 */
401 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
402 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
403 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA512_DIGEST_LENGTH))) {
405 			/*
406 			 * The caller requested a short digest. Digest
407 			 * into a scratch buffer and return to
408 			 * the user only what was requested.
409 			 */
410 			SHA2Final(digest_scratch, sha2_ctx);
411 
412 			bcopy(digest_scratch, (uchar_t *)digest->
413 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
414 			    digest_len);
415 		} else {
416 			SHA2Final((uchar_t *)digest->
417 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
418 			    sha2_ctx);
419 
420 		}
421 	} else {
422 		/*
		 * The computed digest will cross one or more iovecs.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest iovecs.
427 		 */
428 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
429 		off_t scratch_offset = 0;
430 		size_t length = digest_len;
431 		size_t cur_len;
432 
433 		SHA2Final(digest_tmp, sha2_ctx);
434 
435 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
436 			cur_len =
437 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
			    offset, length);
439 			bcopy(digest_tmp + scratch_offset,
440 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
441 			    cur_len);
442 
443 			length -= cur_len;
444 			vec_idx++;
445 			scratch_offset += cur_len;
446 			offset = 0;
447 		}
448 
449 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
450 			/*
			 * The end of the specified iovecs was reached but
			 * the requested length could not be processed;
			 * i.e., the caller requested to digest more data
			 * than it provided.
455 			 */
456 			return (CRYPTO_DATA_LEN_RANGE);
457 		}
458 	}
459 
460 	return (CRYPTO_SUCCESS);
461 }
462 
463 /*
464  * Helper SHA2 digest update for mblk's.
465  */
466 static int
467 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
468 {
469 	off_t offset = data->cd_offset;
470 	size_t length = data->cd_length;
471 	mblk_t *mp;
472 	size_t cur_len;
473 
474 	/*
475 	 * Jump to the first mblk_t containing data to be digested.
476 	 */
477 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
478 	    offset -= MBLKL(mp), mp = mp->b_cont);
479 	if (mp == NULL) {
480 		/*
481 		 * The caller specified an offset that is larger than the
482 		 * total size of the buffers it provided.
483 		 */
484 		return (CRYPTO_DATA_LEN_RANGE);
485 	}
486 
487 	/*
488 	 * Now do the digesting on the mblk chain.
489 	 */
490 	while (mp != NULL && length > 0) {
491 		cur_len = MIN(MBLKL(mp) - offset, length);
492 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
493 		length -= cur_len;
494 		offset = 0;
495 		mp = mp->b_cont;
496 	}
497 
498 	if (mp == NULL && length > 0) {
499 		/*
		 * The end of the mblk chain was reached but the requested
		 * length could not be processed; i.e., the caller requested
		 * to digest more data than it provided.
503 		 */
504 		return (CRYPTO_DATA_LEN_RANGE);
505 	}
506 
507 	return (CRYPTO_SUCCESS);
508 }
509 
510 /*
511  * Helper SHA2 digest final for mblk's.
512  * digest_len is the length of the desired digest. If digest_len
513  * is smaller than the default SHA2 digest length, the caller
514  * must pass a scratch buffer, digest_scratch, which must
515  * be at least the algorithm's digest length bytes.
516  */
517 static int
518 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
519     ulong_t digest_len, uchar_t *digest_scratch)
520 {
521 	off_t offset = digest->cd_offset;
522 	mblk_t *mp;
523 
524 	/*
525 	 * Jump to the first mblk_t that will be used to store the digest.
526 	 */
527 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
528 	    offset -= MBLKL(mp), mp = mp->b_cont);
529 	if (mp == NULL) {
530 		/*
531 		 * The caller specified an offset that is larger than the
532 		 * total size of the buffers it provided.
533 		 */
534 		return (CRYPTO_DATA_LEN_RANGE);
535 	}
536 
537 	if (offset + digest_len <= MBLKL(mp)) {
538 		/*
539 		 * The computed SHA2 digest will fit in the current mblk.
540 		 * Do the SHA2Final() in-place.
541 		 */
542 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
543 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
544 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA512_DIGEST_LENGTH))) {
546 			/*
547 			 * The caller requested a short digest. Digest
548 			 * into a scratch buffer and return to
549 			 * the user only what was requested.
550 			 */
551 			SHA2Final(digest_scratch, sha2_ctx);
552 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
553 		} else {
554 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
555 		}
556 	} else {
557 		/*
		 * The computed digest will cross one or more mblks.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest mblks.
562 		 */
563 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
564 		off_t scratch_offset = 0;
565 		size_t length = digest_len;
566 		size_t cur_len;
567 
568 		SHA2Final(digest_tmp, sha2_ctx);
569 
570 		while (mp != NULL && length > 0) {
571 			cur_len = MIN(MBLKL(mp) - offset, length);
572 			bcopy(digest_tmp + scratch_offset,
573 			    mp->b_rptr + offset, cur_len);
574 
575 			length -= cur_len;
576 			mp = mp->b_cont;
577 			scratch_offset += cur_len;
578 			offset = 0;
579 		}
580 
581 		if (mp == NULL && length > 0) {
582 			/*
			 * The end of the specified mblk chain was reached
			 * but the requested length could not be processed;
			 * i.e., the caller requested to digest more data
			 * than it provided.
587 			 */
588 			return (CRYPTO_DATA_LEN_RANGE);
589 		}
590 	}
591 
592 	return (CRYPTO_SUCCESS);
593 }
594 
595 /* ARGSUSED */
596 static int
597 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
598     crypto_req_handle_t req)
599 {
600 	int ret = CRYPTO_SUCCESS;
601 	uint_t sha_digest_len;
602 
603 	ASSERT(ctx->cc_provider_private != NULL);
604 
605 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
606 	case SHA256_MECH_INFO_TYPE:
607 		sha_digest_len = SHA256_DIGEST_LENGTH;
608 		break;
609 	case SHA384_MECH_INFO_TYPE:
610 		sha_digest_len = SHA384_DIGEST_LENGTH;
611 		break;
612 	case SHA512_MECH_INFO_TYPE:
613 		sha_digest_len = SHA512_DIGEST_LENGTH;
614 		break;
615 	default:
616 		return (CRYPTO_MECHANISM_INVALID);
617 	}
618 
619 	/*
	 * If the output buffer is too small (or its length is zero),
	 * just return the length needed to store the digest and do
	 * not destroy the context.
622 	 */
623 	if ((digest->cd_length == 0) ||
624 	    (digest->cd_length < sha_digest_len)) {
625 		digest->cd_length = sha_digest_len;
626 		return (CRYPTO_BUFFER_TOO_SMALL);
627 	}
628 
629 	/*
630 	 * Do the SHA2 update on the specified input data.
631 	 */
632 	switch (data->cd_format) {
633 	case CRYPTO_DATA_RAW:
634 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
635 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
636 		    data->cd_length);
637 		break;
638 	case CRYPTO_DATA_UIO:
639 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
640 		    data);
641 		break;
642 	case CRYPTO_DATA_MBLK:
643 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
644 		    data);
645 		break;
646 	default:
647 		ret = CRYPTO_ARGUMENTS_BAD;
648 	}
649 
650 	if (ret != CRYPTO_SUCCESS) {
651 		/* the update failed, free context and bail */
652 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
653 		ctx->cc_provider_private = NULL;
654 		digest->cd_length = 0;
655 		return (ret);
656 	}
657 
658 	/*
	 * Do a SHA2 final; this must be done separately since the
	 * digest data type can differ from the input data type.
661 	 */
662 	switch (digest->cd_format) {
663 	case CRYPTO_DATA_RAW:
664 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
665 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
666 		break;
667 	case CRYPTO_DATA_UIO:
668 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
669 		    digest, sha_digest_len, NULL);
670 		break;
671 	case CRYPTO_DATA_MBLK:
672 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
673 		    digest, sha_digest_len, NULL);
674 		break;
675 	default:
676 		ret = CRYPTO_ARGUMENTS_BAD;
677 	}
678 
679 	/* all done, free context and return */
680 
681 	if (ret == CRYPTO_SUCCESS)
682 		digest->cd_length = sha_digest_len;
683 	else
684 		digest->cd_length = 0;
685 
686 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
687 	ctx->cc_provider_private = NULL;
688 	return (ret);
689 }
690 
691 /* ARGSUSED */
692 static int
693 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
694     crypto_req_handle_t req)
695 {
696 	int ret = CRYPTO_SUCCESS;
697 
698 	ASSERT(ctx->cc_provider_private != NULL);
699 
700 	/*
701 	 * Do the SHA2 update on the specified input data.
702 	 */
703 	switch (data->cd_format) {
704 	case CRYPTO_DATA_RAW:
705 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
706 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
707 		    data->cd_length);
708 		break;
709 	case CRYPTO_DATA_UIO:
710 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
711 		    data);
712 		break;
713 	case CRYPTO_DATA_MBLK:
714 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
715 		    data);
716 		break;
717 	default:
718 		ret = CRYPTO_ARGUMENTS_BAD;
719 	}
720 
721 	return (ret);
722 }
723 
724 /* ARGSUSED */
725 static int
726 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
727     crypto_req_handle_t req)
728 {
729 	int ret = CRYPTO_SUCCESS;
730 	uint_t sha_digest_len;
731 
732 	ASSERT(ctx->cc_provider_private != NULL);
733 
734 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
735 	case SHA256_MECH_INFO_TYPE:
736 		sha_digest_len = SHA256_DIGEST_LENGTH;
737 		break;
738 	case SHA384_MECH_INFO_TYPE:
739 		sha_digest_len = SHA384_DIGEST_LENGTH;
740 		break;
741 	case SHA512_MECH_INFO_TYPE:
742 		sha_digest_len = SHA512_DIGEST_LENGTH;
743 		break;
744 	default:
745 		return (CRYPTO_MECHANISM_INVALID);
746 	}
747 
748 	/*
	 * If the output buffer is too small (or its length is zero),
	 * just return the length needed to store the digest and do
	 * not destroy the context.
751 	 */
752 	if ((digest->cd_length == 0) ||
753 	    (digest->cd_length < sha_digest_len)) {
754 		digest->cd_length = sha_digest_len;
755 		return (CRYPTO_BUFFER_TOO_SMALL);
756 	}
757 
758 	/*
759 	 * Do a SHA2 final.
760 	 */
761 	switch (digest->cd_format) {
762 	case CRYPTO_DATA_RAW:
763 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
764 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
765 		break;
766 	case CRYPTO_DATA_UIO:
767 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
768 		    digest, sha_digest_len, NULL);
769 		break;
770 	case CRYPTO_DATA_MBLK:
771 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
772 		    digest, sha_digest_len, NULL);
773 		break;
774 	default:
775 		ret = CRYPTO_ARGUMENTS_BAD;
776 	}
777 
778 	/* all done, free context and return */
779 
780 	if (ret == CRYPTO_SUCCESS)
781 		digest->cd_length = sha_digest_len;
782 	else
783 		digest->cd_length = 0;
784 
785 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
786 	ctx->cc_provider_private = NULL;
787 
788 	return (ret);
789 }
790 
791 /* ARGSUSED */
792 static int
793 sha2_digest_atomic(crypto_provider_handle_t provider,
794     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
795     crypto_data_t *data, crypto_data_t *digest,
796     crypto_req_handle_t req)
797 {
798 	int ret = CRYPTO_SUCCESS;
799 	SHA2_CTX sha2_ctx;
800 	uint32_t sha_digest_len;
801 
	/*
	 * Do the SHA2 init.
	 */
	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
809 	case CRYPTO_DATA_RAW:
810 		SHA2Update(&sha2_ctx, (uint8_t *)data->
811 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
812 		break;
813 	case CRYPTO_DATA_UIO:
814 		ret = sha2_digest_update_uio(&sha2_ctx, data);
815 		break;
816 	case CRYPTO_DATA_MBLK:
817 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
818 		break;
819 	default:
820 		ret = CRYPTO_ARGUMENTS_BAD;
821 	}
822 
827 	if (ret != CRYPTO_SUCCESS) {
828 		/* the update failed, bail */
829 		digest->cd_length = 0;
830 		return (ret);
831 	}
832 
833 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
834 		sha_digest_len = SHA256_DIGEST_LENGTH;
835 	else
836 		sha_digest_len = SHA512_DIGEST_LENGTH;
837 
838 	/*
	 * Do a SHA2 final; this must be done separately since the
	 * digest data type can differ from the input data type.
841 	 */
842 	switch (digest->cd_format) {
843 	case CRYPTO_DATA_RAW:
844 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
845 		    digest->cd_offset, &sha2_ctx);
846 		break;
847 	case CRYPTO_DATA_UIO:
848 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
849 		    sha_digest_len, NULL);
850 		break;
851 	case CRYPTO_DATA_MBLK:
852 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
853 		    sha_digest_len, NULL);
854 		break;
855 	default:
856 		ret = CRYPTO_ARGUMENTS_BAD;
857 	}
858 
859 	if (ret == CRYPTO_SUCCESS)
860 		digest->cd_length = sha_digest_len;
861 	else
862 		digest->cd_length = 0;
863 
864 	return (ret);
865 }
866 
867 /*
868  * KCF software provider mac entry points.
869  *
870  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
871  *
872  * Init:
873  * The initialization routine initializes what we denote
874  * as the inner and outer contexts by doing
875  * - for inner context: SHA2(key XOR ipad)
876  * - for outer context: SHA2(key XOR opad)
877  *
878  * Update:
879  * Each subsequent SHA2 HMAC update will result in an
880  * update of the inner context with the specified data.
881  *
882  * Final:
883  * The SHA2 HMAC final will do a SHA2 final operation on the
884  * inner context, and the resulting digest will be used
885  * as the data for an update on the outer context. Last
886  * but not least, a SHA2 final on the outer context will
887  * be performed to obtain the SHA2 HMAC digest to return
888  * to the user.
889  */
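
/*
 * In terms of the hash primitives, the flow implemented below is
 * roughly the following sketch (long-key handling, truncation for the
 * GENERAL mechanisms, and error paths omitted; mech, bs, textlen and
 * digestlen are shorthand):
 *
 *	init:	SHA2Init(mech, &icontext); SHA2Update(&icontext, K^ipad, bs);
 *		SHA2Init(mech, &ocontext); SHA2Update(&ocontext, K^opad, bs);
 *	update:	SHA2Update(&icontext, text, textlen);
 *	final:	SHA2Final(inner, &icontext);
 *		SHA2Update(&ocontext, inner, digestlen);
 *		SHA2Final(mac, &ocontext);
 *
 * where K is the key zero-padded to the hash block size, ipad is the
 * block-sized constant 0x36...36 and opad is 0x5c...5c.
 */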
890 
891 /*
892  * Initialize a SHA2-HMAC context.
893  */
894 static void
895 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
896 {
897 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
898 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
899 	int i, block_size, blocks_per_int64;
900 
901 	/* Determine the block size */
902 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
903 		block_size = SHA256_HMAC_BLOCK_SIZE;
904 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
905 	} else {
906 		block_size = SHA512_HMAC_BLOCK_SIZE;
907 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
908 	}
909 
910 	(void) bzero(ipad, block_size);
911 	(void) bzero(opad, block_size);
912 	(void) bcopy(keyval, ipad, length_in_bytes);
913 	(void) bcopy(keyval, opad, length_in_bytes);
914 
915 	/* XOR key with ipad (0x36) and opad (0x5c) */
	for (i = 0; i < blocks_per_int64; i++) {
917 		ipad[i] ^= 0x3636363636363636;
918 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
919 	}
920 
921 	/* perform SHA2 on ipad */
922 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
923 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
924 
925 	/* perform SHA2 on opad */
926 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
927 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
928 
929 }
930 
/*
 * KCF mac_init entry point: initializes a SHA2-HMAC context.
 */
933 static int
934 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
935     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
936     crypto_req_handle_t req)
937 {
938 	int ret = CRYPTO_SUCCESS;
939 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
940 	uint_t sha_digest_len, sha_hmac_block_size;
941 
942 	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
945 	 */
946 	switch (mechanism->cm_type) {
947 	case SHA256_HMAC_MECH_INFO_TYPE:
948 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
949 		sha_digest_len = SHA256_DIGEST_LENGTH;
950 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
951 		break;
952 	case SHA384_HMAC_MECH_INFO_TYPE:
953 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
954 	case SHA512_HMAC_MECH_INFO_TYPE:
955 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
956 		sha_digest_len = SHA512_DIGEST_LENGTH;
957 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
958 		break;
959 	default:
960 		return (CRYPTO_MECHANISM_INVALID);
961 	}
962 
963 	if (key->ck_format != CRYPTO_KEY_RAW)
964 		return (CRYPTO_ARGUMENTS_BAD);
965 
966 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
967 	    crypto_kmflag(req));
968 	if (ctx->cc_provider_private == NULL)
969 		return (CRYPTO_HOST_MEMORY);
970 
971 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
972 	if (ctx_template != NULL) {
973 		/* reuse context template */
974 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
975 		    sizeof (sha2_hmac_ctx_t));
976 	} else {
977 		/* no context template, compute context */
978 		if (keylen_in_bytes > sha_hmac_block_size) {
979 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
980 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
981 
982 			/*
983 			 * Hash the passed-in key to get a smaller key.
984 			 * The inner context is used since it hasn't been
985 			 * initialized yet.
986 			 */
987 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
988 			    &hmac_ctx->hc_icontext,
989 			    key->ck_data, keylen_in_bytes, digested_key);
990 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
991 			    digested_key, sha_digest_len);
992 		} else {
993 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
994 			    key->ck_data, keylen_in_bytes);
995 		}
996 	}
997 
998 	/*
999 	 * Get the mechanism parameters, if applicable.
1000 	 */
1001 	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
1009 	}
1010 
1011 	if (ret != CRYPTO_SUCCESS) {
1012 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1013 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1014 		ctx->cc_provider_private = NULL;
1015 	}
1016 
1017 	return (ret);
1018 }
1019 
1020 /* ARGSUSED */
1021 static int
1022 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1023     crypto_req_handle_t req)
1024 {
1025 	int ret = CRYPTO_SUCCESS;
1026 
1027 	ASSERT(ctx->cc_provider_private != NULL);
1028 
1029 	/*
1030 	 * Do a SHA2 update of the inner context using the specified
1031 	 * data.
1032 	 */
1033 	switch (data->cd_format) {
1034 	case CRYPTO_DATA_RAW:
1035 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1036 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1037 		    data->cd_length);
1038 		break;
1039 	case CRYPTO_DATA_UIO:
1040 		ret = sha2_digest_update_uio(
1041 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1042 		break;
1043 	case CRYPTO_DATA_MBLK:
1044 		ret = sha2_digest_update_mblk(
1045 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1046 		break;
1047 	default:
1048 		ret = CRYPTO_ARGUMENTS_BAD;
1049 	}
1050 
1051 	return (ret);
1052 }
1053 
1054 /* ARGSUSED */
1055 static int
1056 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1057 {
1058 	int ret = CRYPTO_SUCCESS;
1059 	uchar_t digest[SHA512_DIGEST_LENGTH];
1060 	uint32_t digest_len, sha_digest_len;
1061 
1062 	ASSERT(ctx->cc_provider_private != NULL);
1063 
	/* Set the digest lengths to values appropriate to the mechanism */
1065 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1066 	case SHA256_HMAC_MECH_INFO_TYPE:
1067 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1068 		break;
1069 	case SHA384_HMAC_MECH_INFO_TYPE:
1070 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1071 		break;
1072 	case SHA512_HMAC_MECH_INFO_TYPE:
1073 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1074 		break;
1075 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1076 		sha_digest_len = SHA256_DIGEST_LENGTH;
1077 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1078 		break;
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
1084 	}
1085 
1086 	/*
	 * If the output buffer is too small (or its length is zero),
	 * just return the length needed to store the MAC and do not
	 * destroy the context.
1089 	 */
1090 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1091 		mac->cd_length = digest_len;
1092 		return (CRYPTO_BUFFER_TOO_SMALL);
1093 	}
1094 
1095 	/*
1096 	 * Do a SHA2 final on the inner context.
1097 	 */
1098 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1099 
1100 	/*
1101 	 * Do a SHA2 update on the outer context, feeding the inner
1102 	 * digest as data.
1103 	 */
1104 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1105 	    sha_digest_len);
1106 
1107 	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1110 	 */
1111 	switch (mac->cd_format) {
1112 	case CRYPTO_DATA_RAW:
1113 		if (digest_len != sha_digest_len) {
1114 			/*
1115 			 * The caller requested a short digest. Digest
1116 			 * into a scratch buffer and return to
1117 			 * the user only what was requested.
1118 			 */
1119 			SHA2Final(digest,
1120 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1121 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1122 			    mac->cd_offset, digest_len);
1123 		} else {
1124 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1125 			    mac->cd_offset,
1126 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1127 		}
1128 		break;
1129 	case CRYPTO_DATA_UIO:
1130 		ret = sha2_digest_final_uio(
1131 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1132 		    digest_len, digest);
1133 		break;
1134 	case CRYPTO_DATA_MBLK:
1135 		ret = sha2_digest_final_mblk(
1136 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1137 		    digest_len, digest);
1138 		break;
1139 	default:
1140 		ret = CRYPTO_ARGUMENTS_BAD;
1141 	}
1142 
1143 	if (ret == CRYPTO_SUCCESS)
1144 		mac->cd_length = digest_len;
1145 	else
1146 		mac->cd_length = 0;
1147 
1148 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1149 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1150 	ctx->cc_provider_private = NULL;
1151 
1152 	return (ret);
1153 }
1154 
1155 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1156 	switch (data->cd_format) {					\
1157 	case CRYPTO_DATA_RAW:						\
1158 		SHA2Update(&(ctx).hc_icontext,				\
1159 		    (uint8_t *)data->cd_raw.iov_base +			\
1160 		    data->cd_offset, data->cd_length);			\
1161 		break;							\
1162 	case CRYPTO_DATA_UIO:						\
1163 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1164 		break;							\
1165 	case CRYPTO_DATA_MBLK:						\
1166 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1167 		    data);						\
1168 		break;							\
1169 	default:							\
1170 		ret = CRYPTO_ARGUMENTS_BAD;				\
1171 	}								\
1172 }
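
/*
 * Inline equivalent of sha2_mac_update() for the atomic entry points
 * below, which operate on a caller-provided sha2_hmac_ctx_t rather
 * than a KCF crypto_ctx_t.
 */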
1173 
1174 /* ARGSUSED */
1175 static int
1176 sha2_mac_atomic(crypto_provider_handle_t provider,
1177     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1178     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1179     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1180 {
1181 	int ret = CRYPTO_SUCCESS;
1182 	uchar_t digest[SHA512_DIGEST_LENGTH];
1183 	sha2_hmac_ctx_t sha2_hmac_ctx;
1184 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1185 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1186 
1187 	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
1190 	 */
1191 	switch (mechanism->cm_type) {
1192 	case SHA256_HMAC_MECH_INFO_TYPE:
1193 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1194 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1195 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1196 		break;
1197 	case SHA384_HMAC_MECH_INFO_TYPE:
1198 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1199 	case SHA512_HMAC_MECH_INFO_TYPE:
1200 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1201 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1202 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1203 		break;
1204 	default:
1205 		return (CRYPTO_MECHANISM_INVALID);
1206 	}
1207 
1208 	/* Add support for key by attributes (RFE 4706552) */
1209 	if (key->ck_format != CRYPTO_KEY_RAW)
1210 		return (CRYPTO_ARGUMENTS_BAD);
1211 
1212 	if (ctx_template != NULL) {
1213 		/* reuse context template */
1214 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1215 	} else {
1216 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1217 		/* no context template, initialize context */
1218 		if (keylen_in_bytes > sha_hmac_block_size) {
1219 			/*
1220 			 * Hash the passed-in key to get a smaller key.
1221 			 * The inner context is used since it hasn't been
1222 			 * initialized yet.
1223 			 */
1224 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1225 			    &sha2_hmac_ctx.hc_icontext,
1226 			    key->ck_data, keylen_in_bytes, digest);
1227 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1228 			    sha_digest_len);
1229 		} else {
1230 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1231 			    keylen_in_bytes);
1232 		}
1233 	}
1234 
1235 	/* get the mechanism parameters, if applicable */
1236 	if ((mechanism->cm_type % 3) == 2) {
1237 		if (mechanism->cm_param == NULL ||
1238 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1239 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1240 			goto bail;
1241 		}
1242 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1243 		if (digest_len > sha_digest_len) {
1244 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1245 			goto bail;
1246 		}
1247 	}
1248 
1249 	/* do a SHA2 update of the inner context using the specified data */
1250 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1251 	if (ret != CRYPTO_SUCCESS)
1252 		/* the update failed, free context and bail */
1253 		goto bail;
1254 
1255 	/*
1256 	 * Do a SHA2 final on the inner context.
1257 	 */
1258 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1259 
1260 	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * SHA384 needs special handling: its inner digest is only
	 * SHA384_DIGEST_LENGTH (48) bytes, so feed that many bytes to
	 * the outer context rather than sha_digest_len (64) bytes.
1266 	 */
1267 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1268 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1269 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1270 		    SHA384_DIGEST_LENGTH);
1271 	else
1272 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1273 
1274 	/*
1275 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1277 	 */
1278 	switch (mac->cd_format) {
1279 	case CRYPTO_DATA_RAW:
1280 		if (digest_len != sha_digest_len) {
1281 			/*
1282 			 * The caller requested a short digest. Digest
1283 			 * into a scratch buffer and return to
1284 			 * the user only what was requested.
1285 			 */
1286 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1287 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1288 			    mac->cd_offset, digest_len);
1289 		} else {
1290 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1291 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1292 		}
1293 		break;
1294 	case CRYPTO_DATA_UIO:
1295 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1296 		    digest_len, digest);
1297 		break;
1298 	case CRYPTO_DATA_MBLK:
1299 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1300 		    digest_len, digest);
1301 		break;
1302 	default:
1303 		ret = CRYPTO_ARGUMENTS_BAD;
1304 	}
1305 
1306 	if (ret == CRYPTO_SUCCESS) {
1307 		mac->cd_length = digest_len;
1308 		return (CRYPTO_SUCCESS);
1309 	}
1310 bail:
1311 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1312 	mac->cd_length = 0;
1313 	return (ret);
1314 }
1315 
1316 /* ARGSUSED */
1317 static int
1318 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1319     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1320     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1321     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1322 {
1323 	int ret = CRYPTO_SUCCESS;
1324 	uchar_t digest[SHA512_DIGEST_LENGTH];
1325 	sha2_hmac_ctx_t sha2_hmac_ctx;
1326 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1327 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1328 
1329 	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
1332 	 */
1333 	switch (mechanism->cm_type) {
1334 	case SHA256_HMAC_MECH_INFO_TYPE:
1335 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1336 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1337 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1338 		break;
1339 	case SHA384_HMAC_MECH_INFO_TYPE:
1340 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1341 	case SHA512_HMAC_MECH_INFO_TYPE:
1342 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1343 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1344 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1345 		break;
1346 	default:
1347 		return (CRYPTO_MECHANISM_INVALID);
1348 	}
1349 
1350 	/* Add support for key by attributes (RFE 4706552) */
1351 	if (key->ck_format != CRYPTO_KEY_RAW)
1352 		return (CRYPTO_ARGUMENTS_BAD);
1353 
1354 	if (ctx_template != NULL) {
1355 		/* reuse context template */
1356 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1357 	} else {
1358 		/* no context template, initialize context */
1359 		if (keylen_in_bytes > sha_hmac_block_size) {
1360 			/*
1361 			 * Hash the passed-in key to get a smaller key.
1362 			 * The inner context is used since it hasn't been
1363 			 * initialized yet.
1364 			 */
1365 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1366 			    &sha2_hmac_ctx.hc_icontext,
1367 			    key->ck_data, keylen_in_bytes, digest);
1368 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1369 			    sha_digest_len);
1370 		} else {
1371 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1372 			    keylen_in_bytes);
1373 		}
1374 	}
1375 
1376 	/* get the mechanism parameters, if applicable */
1377 	if (mechanism->cm_type % 3 == 2) {
1378 		if (mechanism->cm_param == NULL ||
1379 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1380 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1381 			goto bail;
1382 		}
1383 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1384 		if (digest_len > sha_digest_len) {
1385 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1386 			goto bail;
1387 		}
1388 	}
1389 
1390 	if (mac->cd_length != digest_len) {
1391 		ret = CRYPTO_INVALID_MAC;
1392 		goto bail;
1393 	}
1394 
1395 	/* do a SHA2 update of the inner context using the specified data */
1396 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1397 	if (ret != CRYPTO_SUCCESS)
1398 		/* the update failed, free context and bail */
1399 		goto bail;
1400 
1401 	/* do a SHA2 final on the inner context */
1402 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1403 
1404 	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * As in sha2_mac_atomic(), SHA384 needs special handling: its
	 * inner digest is only SHA384_DIGEST_LENGTH (48) bytes, not
	 * sha_digest_len (64) bytes.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1409 
1410 	/*
1411 	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1413 	 */
1414 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1415 
1416 	/*
1417 	 * Compare the computed digest against the expected digest passed
1418 	 * as argument.
1419 	 */
1420 
1421 	switch (mac->cd_format) {
1422 
1423 	case CRYPTO_DATA_RAW:
1424 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1425 		    mac->cd_offset, digest_len) != 0)
1426 			ret = CRYPTO_INVALID_MAC;
1427 		break;
1428 
1429 	case CRYPTO_DATA_UIO: {
1430 		off_t offset = mac->cd_offset;
1431 		uint_t vec_idx;
1432 		off_t scratch_offset = 0;
1433 		size_t length = digest_len;
1434 		size_t cur_len;
1435 
1436 		/* we support only kernel buffer */
1437 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1438 			return (CRYPTO_ARGUMENTS_BAD);
1439 
1440 		/* jump to the first iovec containing the expected digest */
1441 		for (vec_idx = 0;
		    vec_idx < mac->cd_uio->uio_iovcnt &&
		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1444 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len);
1445 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1446 			/*
1447 			 * The caller specified an offset that is
1448 			 * larger than the total size of the buffers
1449 			 * it provided.
1450 			 */
1451 			ret = CRYPTO_DATA_LEN_RANGE;
1452 			break;
1453 		}
1454 
1455 		/* do the comparison of computed digest vs specified one */
1456 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1457 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1458 			    offset, length);
1459 
1460 			if (bcmp(digest + scratch_offset,
1461 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1462 			    cur_len) != 0) {
1463 				ret = CRYPTO_INVALID_MAC;
1464 				break;
1465 			}
1466 
1467 			length -= cur_len;
1468 			vec_idx++;
1469 			scratch_offset += cur_len;
1470 			offset = 0;
1471 		}
1472 		break;
1473 	}
1474 
1475 	case CRYPTO_DATA_MBLK: {
1476 		off_t offset = mac->cd_offset;
1477 		mblk_t *mp;
1478 		off_t scratch_offset = 0;
1479 		size_t length = digest_len;
1480 		size_t cur_len;
1481 
1482 		/* jump to the first mblk_t containing the expected digest */
1483 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1484 		    offset -= MBLKL(mp), mp = mp->b_cont);
1485 		if (mp == NULL) {
1486 			/*
1487 			 * The caller specified an offset that is larger than
1488 			 * the total size of the buffers it provided.
1489 			 */
1490 			ret = CRYPTO_DATA_LEN_RANGE;
1491 			break;
1492 		}
1493 
1494 		while (mp != NULL && length > 0) {
1495 			cur_len = MIN(MBLKL(mp) - offset, length);
1496 			if (bcmp(digest + scratch_offset,
1497 			    mp->b_rptr + offset, cur_len) != 0) {
1498 				ret = CRYPTO_INVALID_MAC;
1499 				break;
1500 			}
1501 
1502 			length -= cur_len;
1503 			mp = mp->b_cont;
1504 			scratch_offset += cur_len;
1505 			offset = 0;
1506 		}
1507 		break;
1508 	}
1509 
1510 	default:
1511 		ret = CRYPTO_ARGUMENTS_BAD;
1512 	}
1513 
1514 	return (ret);
1515 bail:
1516 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1517 	mac->cd_length = 0;
1518 	return (ret);
1519 }
1520 
1521 /*
1522  * KCF software provider context management entry points.
1523  */
1524 
1525 /* ARGSUSED */
1526 static int
1527 sha2_create_ctx_template(crypto_provider_handle_t provider,
1528     crypto_mechanism_t *mechanism, crypto_key_t *key,
1529     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1530     crypto_req_handle_t req)
1531 {
1532 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1533 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1534 	uint32_t sha_digest_len, sha_hmac_block_size;
1535 
1536 	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
1539 	 */
1540 	switch (mechanism->cm_type) {
1541 	case SHA256_HMAC_MECH_INFO_TYPE:
1542 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1543 		sha_digest_len = SHA256_DIGEST_LENGTH;
1544 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1545 		break;
1546 	case SHA384_HMAC_MECH_INFO_TYPE:
1547 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1548 	case SHA512_HMAC_MECH_INFO_TYPE:
1549 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1550 		sha_digest_len = SHA512_DIGEST_LENGTH;
1551 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1552 		break;
1553 	default:
1554 		return (CRYPTO_MECHANISM_INVALID);
1555 	}
1556 
1557 	/* Add support for key by attributes (RFE 4706552) */
1558 	if (key->ck_format != CRYPTO_KEY_RAW)
1559 		return (CRYPTO_ARGUMENTS_BAD);
1560 
1561 	/*
1562 	 * Allocate and initialize SHA2 context.
1563 	 */
1564 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1565 	    crypto_kmflag(req));
1566 	if (sha2_hmac_ctx_tmpl == NULL)
1567 		return (CRYPTO_HOST_MEMORY);
1568 
1569 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1570 
1571 	if (keylen_in_bytes > sha_hmac_block_size) {
1572 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1573 
1574 		/*
1575 		 * Hash the passed-in key to get a smaller key.
1576 		 * The inner context is used since it hasn't been
1577 		 * initialized yet.
1578 		 */
1579 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1580 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1581 		    key->ck_data, keylen_in_bytes, digested_key);
1582 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1583 		    sha_digest_len);
1584 	} else {
1585 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1586 		    keylen_in_bytes);
1587 	}
1588 
1589 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1590 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1591 
1592 	return (CRYPTO_SUCCESS);
1593 }
1594 
1595 static int
1596 sha2_free_context(crypto_ctx_t *ctx)
1597 {
1598 	uint_t ctx_len;
1599 
1600 	if (ctx->cc_provider_private == NULL)
1601 		return (CRYPTO_SUCCESS);
1602 
1603 	/*
1604 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1605 	 * have different lengths.
1606 	 *
1607 	 * Note: Below is dependent on the mechanism ordering.
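	 *
	 * The mechanisms are grouped in threes in sha2_mech_info_tab[]
	 * (digest, HMAC, HMAC GENERAL), so a mech_type that is a
	 * multiple of three identifies a plain digest context; the
	 * other two members of each triple are HMAC contexts.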
1608 	 */
1609 
1610 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1611 		ctx_len = sizeof (sha2_ctx_t);
1612 	else
1613 		ctx_len = sizeof (sha2_hmac_ctx_t);
1614 
1615 	bzero(ctx->cc_provider_private, ctx_len);
1616 	kmem_free(ctx->cc_provider_private, ctx_len);
1617 	ctx->cc_provider_private = NULL;
1618 
1619 	return (CRYPTO_SUCCESS);
1620 }
1621