xref: /titanic_41/usr/src/uts/common/crypto/io/sha2_mod.c (revision f6e214c7418f43af38bd8c3a557e3d0a1d311cfa)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/modctl.h>
28 #include <sys/cmn_err.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/strsun.h>
32 #include <sys/systm.h>
33 #include <sys/sysmacros.h>
34 #define	_SHA2_IMPL
35 #include <sys/sha2.h>
36 #include <sha2/sha2_impl.h>
37 
38 /*
39  * The sha2 module is created with two modlinkages:
40  * - a modlmisc that allows consumers to directly call the entry points
41  *   SHA2Init, SHA2Update, and SHA2Final.
42  * - a modlcrypto that allows the module to register with the Kernel
43  *   Cryptographic Framework (KCF) as a software provider for the SHA2
44  *   mechanisms.
45  */
46 
47 static struct modlmisc modlmisc = {
48 	&mod_miscops,
49 	"SHA2 Message-Digest Algorithm"
50 };
51 
52 static struct modlcrypto modlcrypto = {
53 	&mod_cryptoops,
54 	"SHA2 Kernel SW Provider"
55 };
56 
57 static struct modlinkage modlinkage = {
58 	MODREV_1, &modlmisc, &modlcrypto, NULL
59 };
60 
61 /*
62  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
63  * by KCF to one of the entry points.
64  */
65 
66 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
67 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
68 
69 /* to extract the digest length passed as mechanism parameter */
70 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
71 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
72 		(len) = (uint32_t)*((ulong_t *)(void *)(m)->cm_param);	\
73 	else {								\
74 		ulong_t tmp_ulong;					\
75 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
76 		(len) = (uint32_t)tmp_ulong;				\
77 	}								\
78 }
79 
80 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
81 	SHA2Init(mech, ctx);				\
82 	SHA2Update(ctx, key, len);			\
83 	SHA2Final(digest, ctx);				\
84 }
85 
86 /*
87  * Mechanism info structure passed to KCF during registration.
88  */
89 static crypto_mech_info_t sha2_mech_info_tab[] = {
90 	/* SHA256 */
91 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
92 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
93 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
94 	/* SHA256-HMAC */
95 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
96 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
97 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
98 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 	/* SHA256-HMAC GENERAL */
100 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
101 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
102 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
103 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
104 	/* SHA384 */
105 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
106 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
107 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
108 	/* SHA384-HMAC */
109 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
110 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
111 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
112 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
113 	/* SHA384-HMAC GENERAL */
114 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
115 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
116 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
117 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
118 	/* SHA512 */
119 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
120 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
121 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 	/* SHA512-HMAC */
123 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
124 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
127 	/* SHA512-HMAC GENERAL */
128 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
129 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
130 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
131 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
132 };
133 
134 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
135 
136 static crypto_control_ops_t sha2_control_ops = {
137 	sha2_provider_status
138 };
139 
140 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
141     crypto_req_handle_t);
142 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
143     crypto_req_handle_t);
144 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
145     crypto_req_handle_t);
146 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
147     crypto_req_handle_t);
148 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
149     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
150     crypto_req_handle_t);
151 
152 static crypto_digest_ops_t sha2_digest_ops = {
153 	sha2_digest_init,
154 	sha2_digest,
155 	sha2_digest_update,
156 	NULL,
157 	sha2_digest_final,
158 	sha2_digest_atomic
159 };
160 
161 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
162     crypto_spi_ctx_template_t, crypto_req_handle_t);
163 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
164     crypto_req_handle_t);
165 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
166 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
167     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
168     crypto_spi_ctx_template_t, crypto_req_handle_t);
169 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
170     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
171     crypto_spi_ctx_template_t, crypto_req_handle_t);
172 
173 static crypto_mac_ops_t sha2_mac_ops = {
174 	sha2_mac_init,
175 	NULL,
176 	sha2_mac_update,
177 	sha2_mac_final,
178 	sha2_mac_atomic,
179 	sha2_mac_verify_atomic
180 };
181 
182 static int sha2_create_ctx_template(crypto_provider_handle_t,
183     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
184     size_t *, crypto_req_handle_t);
185 static int sha2_free_context(crypto_ctx_t *);
186 
187 static crypto_ctx_ops_t sha2_ctx_ops = {
188 	sha2_create_ctx_template,
189 	sha2_free_context
190 };
191 
192 static void sha2_POST(int *);
193 
194 static crypto_fips140_ops_t sha2_fips140_ops = {
195 	sha2_POST
196 };
197 
198 static crypto_ops_t sha2_crypto_ops = {
199 	&sha2_control_ops,
200 	&sha2_digest_ops,
201 	NULL,
202 	&sha2_mac_ops,
203 	NULL,
204 	NULL,
205 	NULL,
206 	NULL,
207 	NULL,
208 	NULL,
209 	NULL,
210 	NULL,
211 	NULL,
212 	&sha2_ctx_ops,
213 	NULL,
214 	NULL,
215 	&sha2_fips140_ops
216 };
217 
218 static crypto_provider_info_t sha2_prov_info = {
219 	CRYPTO_SPI_VERSION_4,
220 	"SHA2 Software Provider",
221 	CRYPTO_SW_PROVIDER,
222 	{&modlinkage},
223 	NULL,
224 	&sha2_crypto_ops,
225 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
226 	sha2_mech_info_tab
227 };
228 
229 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
230 
231 int
232 _init()
233 {
234 	int ret;
235 
236 	if ((ret = mod_install(&modlinkage)) != 0)
237 		return (ret);
238 
239 	/*
240 	 * Register with KCF. If the registration fails, do not uninstall the
241 	 * module, since the functionality provided by misc/sha2 should still
242 	 * be available.
243 	 */
244 	(void) crypto_register_provider(&sha2_prov_info, &sha2_prov_handle);
245 
246 	return (0);
247 }
248 
249 int
250 _info(struct modinfo *modinfop)
251 {
252 	return (mod_info(&modlinkage, modinfop));
253 }
254 
255 /*
256  * KCF software provider control entry points.
257  */
258 /* ARGSUSED */
259 static void
260 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
261 {
262 	*status = CRYPTO_PROVIDER_READY;
263 }
264 
265 /*
266  * KCF software provider digest entry points.
267  */
268 
269 static int
270 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
271     crypto_req_handle_t req)
272 {
273 
274 	/*
275 	 * Allocate and initialize SHA2 context.
276 	 */
277 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
278 	    crypto_kmflag(req));
279 	if (ctx->cc_provider_private == NULL)
280 		return (CRYPTO_HOST_MEMORY);
281 
282 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
283 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
284 
285 	return (CRYPTO_SUCCESS);
286 }
287 
288 /*
289  * Helper SHA2 digest update function for uio data.
290  */
291 static int
292 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
293 {
294 	off_t offset = data->cd_offset;
295 	size_t length = data->cd_length;
296 	uint_t vec_idx;
297 	size_t cur_len;
298 
299 	/* we support only kernel buffer */
300 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
301 		return (CRYPTO_ARGUMENTS_BAD);
302 
303 	/*
304 	 * Jump to the first iovec containing data to be
305 	 * digested.
306 	 */
307 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
308 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
309 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
310 		;
311 	if (vec_idx == data->cd_uio->uio_iovcnt) {
312 		/*
313 		 * The caller specified an offset that is larger than the
314 		 * total size of the buffers it provided.
315 		 */
316 		return (CRYPTO_DATA_LEN_RANGE);
317 	}
318 
319 	/*
320 	 * Now do the digesting on the iovecs.
321 	 */
322 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
323 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
324 		    offset, length);
325 
326 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
327 		    uio_iov[vec_idx].iov_base + offset, cur_len);
328 		length -= cur_len;
329 		vec_idx++;
330 		offset = 0;
331 	}
332 
333 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
334 		/*
335 		 * The end of the specified iovec's was reached but
336 		 * the length requested could not be processed, i.e.
337 		 * The caller requested to digest more data than it provided.
338 		 */
339 		return (CRYPTO_DATA_LEN_RANGE);
340 	}
341 
342 	return (CRYPTO_SUCCESS);
343 }
344 
345 /*
346  * Helper SHA2 digest final function for uio data.
347  * digest_len is the length of the desired digest. If digest_len
348  * is smaller than the default SHA2 digest length, the caller
349  * must pass a scratch buffer, digest_scratch, which must
350  * be at least the algorithm's digest length bytes.
351  */
352 static int
353 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
354     ulong_t digest_len, uchar_t *digest_scratch)
355 {
356 	off_t offset = digest->cd_offset;
357 	uint_t vec_idx;
358 
359 	/* we support only kernel buffer */
360 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
361 		return (CRYPTO_ARGUMENTS_BAD);
362 
363 	/*
364 	 * Jump to the first iovec containing ptr to the digest to
365 	 * be returned.
366 	 */
367 	for (vec_idx = 0; offset >= digest->cd_uio->uio_iov[vec_idx].iov_len &&
368 	    vec_idx < digest->cd_uio->uio_iovcnt;
369 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
370 		;
371 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
372 		/*
373 		 * The caller specified an offset that is
374 		 * larger than the total size of the buffers
375 		 * it provided.
376 		 */
377 		return (CRYPTO_DATA_LEN_RANGE);
378 	}
379 
380 	if (offset + digest_len <=
381 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
382 		/*
383 		 * The computed SHA2 digest will fit in the current
384 		 * iovec.
385 		 */
386 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
387 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
388 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
389 		    (digest_len != SHA512_DIGEST_LENGTH))) {
390 			/*
391 			 * The caller requested a short digest. Digest
392 			 * into a scratch buffer and return to
393 			 * the user only what was requested.
394 			 */
395 			SHA2Final(digest_scratch, sha2_ctx);
396 
397 			bcopy(digest_scratch, (uchar_t *)digest->
398 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
399 			    digest_len);
400 		} else {
401 			SHA2Final((uchar_t *)digest->
402 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
403 			    sha2_ctx);
404 
405 		}
406 	} else {
407 		/*
408 		 * The computed digest will be crossing one or more iovec's.
409 		 * This is bad performance-wise but we need to support it.
410 		 * Allocate a small scratch buffer on the stack and
411 		 * copy it piece meal to the specified digest iovec's.
412 		 */
413 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
414 		off_t scratch_offset = 0;
415 		size_t length = digest_len;
416 		size_t cur_len;
417 
418 		SHA2Final(digest_tmp, sha2_ctx);
419 
420 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
421 			cur_len =
422 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
423 			    offset, length);
424 			bcopy(digest_tmp + scratch_offset,
425 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
426 			    cur_len);
427 
428 			length -= cur_len;
429 			vec_idx++;
430 			scratch_offset += cur_len;
431 			offset = 0;
432 		}
433 
434 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
435 			/*
436 			 * The end of the specified iovec's was reached but
437 			 * the length requested could not be processed, i.e.
438 			 * The caller requested to digest more data than it
439 			 * provided.
440 			 */
441 			return (CRYPTO_DATA_LEN_RANGE);
442 		}
443 	}
444 
445 	return (CRYPTO_SUCCESS);
446 }
447 
448 /*
449  * Helper SHA2 digest update for mblk's.
450  */
451 static int
452 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
453 {
454 	off_t offset = data->cd_offset;
455 	size_t length = data->cd_length;
456 	mblk_t *mp;
457 	size_t cur_len;
458 
459 	/*
460 	 * Jump to the first mblk_t containing data to be digested.
461 	 */
462 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
463 	    offset -= MBLKL(mp), mp = mp->b_cont)
464 		;
465 	if (mp == NULL) {
466 		/*
467 		 * The caller specified an offset that is larger than the
468 		 * total size of the buffers it provided.
469 		 */
470 		return (CRYPTO_DATA_LEN_RANGE);
471 	}
472 
473 	/*
474 	 * Now do the digesting on the mblk chain.
475 	 */
476 	while (mp != NULL && length > 0) {
477 		cur_len = MIN(MBLKL(mp) - offset, length);
478 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
479 		length -= cur_len;
480 		offset = 0;
481 		mp = mp->b_cont;
482 	}
483 
484 	if (mp == NULL && length > 0) {
485 		/*
486 		 * The end of the mblk was reached but the length requested
487 		 * could not be processed, i.e. The caller requested
488 		 * to digest more data than it provided.
489 		 */
490 		return (CRYPTO_DATA_LEN_RANGE);
491 	}
492 
493 	return (CRYPTO_SUCCESS);
494 }
495 
496 /*
497  * Helper SHA2 digest final for mblk's.
498  * digest_len is the length of the desired digest. If digest_len
499  * is smaller than the default SHA2 digest length, the caller
500  * must pass a scratch buffer, digest_scratch, which must
501  * be at least the algorithm's digest length bytes.
502  */
503 static int
504 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
505     ulong_t digest_len, uchar_t *digest_scratch)
506 {
507 	off_t offset = digest->cd_offset;
508 	mblk_t *mp;
509 
510 	/*
511 	 * Jump to the first mblk_t that will be used to store the digest.
512 	 */
513 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
514 	    offset -= MBLKL(mp), mp = mp->b_cont)
515 		;
516 	if (mp == NULL) {
517 		/*
518 		 * The caller specified an offset that is larger than the
519 		 * total size of the buffers it provided.
520 		 */
521 		return (CRYPTO_DATA_LEN_RANGE);
522 	}
523 
524 	if (offset + digest_len <= MBLKL(mp)) {
525 		/*
526 		 * The computed SHA2 digest will fit in the current mblk.
527 		 * Do the SHA2Final() in-place.
528 		 */
529 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
530 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
531 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
532 		    (digest_len != SHA512_DIGEST_LENGTH))) {
533 			/*
534 			 * The caller requested a short digest. Digest
535 			 * into a scratch buffer and return to
536 			 * the user only what was requested.
537 			 */
538 			SHA2Final(digest_scratch, sha2_ctx);
539 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
540 		} else {
541 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
542 		}
543 	} else {
544 		/*
545 		 * The computed digest will be crossing one or more mblk's.
546 		 * This is bad performance-wise but we need to support it.
547 		 * Allocate a small scratch buffer on the stack and
548 		 * copy it piece meal to the specified digest iovec's.
549 		 */
550 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
551 		off_t scratch_offset = 0;
552 		size_t length = digest_len;
553 		size_t cur_len;
554 
555 		SHA2Final(digest_tmp, sha2_ctx);
556 
557 		while (mp != NULL && length > 0) {
558 			cur_len = MIN(MBLKL(mp) - offset, length);
559 			bcopy(digest_tmp + scratch_offset,
560 			    mp->b_rptr + offset, cur_len);
561 
562 			length -= cur_len;
563 			mp = mp->b_cont;
564 			scratch_offset += cur_len;
565 			offset = 0;
566 		}
567 
568 		if (mp == NULL && length > 0) {
569 			/*
570 			 * The end of the specified mblk was reached but
571 			 * the length requested could not be processed, i.e.
572 			 * The caller requested to digest more data than it
573 			 * provided.
574 			 */
575 			return (CRYPTO_DATA_LEN_RANGE);
576 		}
577 	}
578 
579 	return (CRYPTO_SUCCESS);
580 }
581 
582 /* ARGSUSED */
583 static int
584 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
585     crypto_req_handle_t req)
586 {
587 	int ret = CRYPTO_SUCCESS;
588 	uint_t sha_digest_len;
589 
590 	ASSERT(ctx->cc_provider_private != NULL);
591 
592 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
593 	case SHA256_MECH_INFO_TYPE:
594 		sha_digest_len = SHA256_DIGEST_LENGTH;
595 		break;
596 	case SHA384_MECH_INFO_TYPE:
597 		sha_digest_len = SHA384_DIGEST_LENGTH;
598 		break;
599 	case SHA512_MECH_INFO_TYPE:
600 		sha_digest_len = SHA512_DIGEST_LENGTH;
601 		break;
602 	default:
603 		return (CRYPTO_MECHANISM_INVALID);
604 	}
605 
606 	/*
607 	 * We need to just return the length needed to store the output.
608 	 * We should not destroy the context for the following cases.
609 	 */
610 	if ((digest->cd_length == 0) ||
611 	    (digest->cd_length < sha_digest_len)) {
612 		digest->cd_length = sha_digest_len;
613 		return (CRYPTO_BUFFER_TOO_SMALL);
614 	}
615 
616 	/*
617 	 * Do the SHA2 update on the specified input data.
618 	 */
619 	switch (data->cd_format) {
620 	case CRYPTO_DATA_RAW:
621 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
622 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
623 		    data->cd_length);
624 		break;
625 	case CRYPTO_DATA_UIO:
626 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
627 		    data);
628 		break;
629 	case CRYPTO_DATA_MBLK:
630 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
631 		    data);
632 		break;
633 	default:
634 		ret = CRYPTO_ARGUMENTS_BAD;
635 	}
636 
637 	if (ret != CRYPTO_SUCCESS) {
638 		/* the update failed, free context and bail */
639 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
640 		ctx->cc_provider_private = NULL;
641 		digest->cd_length = 0;
642 		return (ret);
643 	}
644 
645 	/*
646 	 * Do a SHA2 final, must be done separately since the digest
647 	 * type can be different than the input data type.
648 	 */
649 	switch (digest->cd_format) {
650 	case CRYPTO_DATA_RAW:
651 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
652 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
653 		break;
654 	case CRYPTO_DATA_UIO:
655 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
656 		    digest, sha_digest_len, NULL);
657 		break;
658 	case CRYPTO_DATA_MBLK:
659 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
660 		    digest, sha_digest_len, NULL);
661 		break;
662 	default:
663 		ret = CRYPTO_ARGUMENTS_BAD;
664 	}
665 
666 	/* all done, free context and return */
667 
668 	if (ret == CRYPTO_SUCCESS)
669 		digest->cd_length = sha_digest_len;
670 	else
671 		digest->cd_length = 0;
672 
673 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
674 	ctx->cc_provider_private = NULL;
675 	return (ret);
676 }
677 
678 /* ARGSUSED */
679 static int
680 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
681     crypto_req_handle_t req)
682 {
683 	int ret = CRYPTO_SUCCESS;
684 
685 	ASSERT(ctx->cc_provider_private != NULL);
686 
687 	/*
688 	 * Do the SHA2 update on the specified input data.
689 	 */
690 	switch (data->cd_format) {
691 	case CRYPTO_DATA_RAW:
692 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
693 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
694 		    data->cd_length);
695 		break;
696 	case CRYPTO_DATA_UIO:
697 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
698 		    data);
699 		break;
700 	case CRYPTO_DATA_MBLK:
701 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
702 		    data);
703 		break;
704 	default:
705 		ret = CRYPTO_ARGUMENTS_BAD;
706 	}
707 
708 	return (ret);
709 }
710 
711 /* ARGSUSED */
712 static int
713 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
714     crypto_req_handle_t req)
715 {
716 	int ret = CRYPTO_SUCCESS;
717 	uint_t sha_digest_len;
718 
719 	ASSERT(ctx->cc_provider_private != NULL);
720 
721 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
722 	case SHA256_MECH_INFO_TYPE:
723 		sha_digest_len = SHA256_DIGEST_LENGTH;
724 		break;
725 	case SHA384_MECH_INFO_TYPE:
726 		sha_digest_len = SHA384_DIGEST_LENGTH;
727 		break;
728 	case SHA512_MECH_INFO_TYPE:
729 		sha_digest_len = SHA512_DIGEST_LENGTH;
730 		break;
731 	default:
732 		return (CRYPTO_MECHANISM_INVALID);
733 	}
734 
735 	/*
736 	 * We need to just return the length needed to store the output.
737 	 * We should not destroy the context for the following cases.
738 	 */
739 	if ((digest->cd_length == 0) ||
740 	    (digest->cd_length < sha_digest_len)) {
741 		digest->cd_length = sha_digest_len;
742 		return (CRYPTO_BUFFER_TOO_SMALL);
743 	}
744 
745 	/*
746 	 * Do a SHA2 final.
747 	 */
748 	switch (digest->cd_format) {
749 	case CRYPTO_DATA_RAW:
750 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
751 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
752 		break;
753 	case CRYPTO_DATA_UIO:
754 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
755 		    digest, sha_digest_len, NULL);
756 		break;
757 	case CRYPTO_DATA_MBLK:
758 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
759 		    digest, sha_digest_len, NULL);
760 		break;
761 	default:
762 		ret = CRYPTO_ARGUMENTS_BAD;
763 	}
764 
765 	/* all done, free context and return */
766 
767 	if (ret == CRYPTO_SUCCESS)
768 		digest->cd_length = sha_digest_len;
769 	else
770 		digest->cd_length = 0;
771 
772 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
773 	ctx->cc_provider_private = NULL;
774 
775 	return (ret);
776 }
777 
778 /* ARGSUSED */
779 static int
780 sha2_digest_atomic(crypto_provider_handle_t provider,
781     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
782     crypto_data_t *data, crypto_data_t *digest,
783     crypto_req_handle_t req)
784 {
785 	int ret = CRYPTO_SUCCESS;
786 	SHA2_CTX sha2_ctx;
787 	uint32_t sha_digest_len;
788 
789 	/*
790 	 * Do the SHA inits.
791 	 */
792 
793 	SHA2Init(mechanism->cm_type, &sha2_ctx);
794 
795 	switch (data->cd_format) {
796 	case CRYPTO_DATA_RAW:
797 		SHA2Update(&sha2_ctx, (uint8_t *)data->
798 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
799 		break;
800 	case CRYPTO_DATA_UIO:
801 		ret = sha2_digest_update_uio(&sha2_ctx, data);
802 		break;
803 	case CRYPTO_DATA_MBLK:
804 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
805 		break;
806 	default:
807 		ret = CRYPTO_ARGUMENTS_BAD;
808 	}
809 
810 	/*
811 	 * Do the SHA updates on the specified input data.
812 	 */
813 
814 	if (ret != CRYPTO_SUCCESS) {
815 		/* the update failed, bail */
816 		digest->cd_length = 0;
817 		return (ret);
818 	}
819 
820 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
821 		sha_digest_len = SHA256_DIGEST_LENGTH;
822 	else
823 		sha_digest_len = SHA512_DIGEST_LENGTH;
824 
825 	/*
826 	 * Do a SHA2 final, must be done separately since the digest
827 	 * type can be different than the input data type.
828 	 */
829 	switch (digest->cd_format) {
830 	case CRYPTO_DATA_RAW:
831 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
832 		    digest->cd_offset, &sha2_ctx);
833 		break;
834 	case CRYPTO_DATA_UIO:
835 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
836 		    sha_digest_len, NULL);
837 		break;
838 	case CRYPTO_DATA_MBLK:
839 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
840 		    sha_digest_len, NULL);
841 		break;
842 	default:
843 		ret = CRYPTO_ARGUMENTS_BAD;
844 	}
845 
846 	if (ret == CRYPTO_SUCCESS)
847 		digest->cd_length = sha_digest_len;
848 	else
849 		digest->cd_length = 0;
850 
851 	return (ret);
852 }
853 
854 /*
855  * KCF software provider mac entry points.
856  *
857  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
858  *
859  * Init:
860  * The initialization routine initializes what we denote
861  * as the inner and outer contexts by doing
862  * - for inner context: SHA2(key XOR ipad)
863  * - for outer context: SHA2(key XOR opad)
864  *
865  * Update:
866  * Each subsequent SHA2 HMAC update will result in an
867  * update of the inner context with the specified data.
868  *
869  * Final:
870  * The SHA2 HMAC final will do a SHA2 final operation on the
871  * inner context, and the resulting digest will be used
872  * as the data for an update on the outer context. Last
873  * but not least, a SHA2 final on the outer context will
874  * be performed to obtain the SHA2 HMAC digest to return
875  * to the user.
876  */
877 
878 /*
879  * Initialize a SHA2-HMAC context.
880  */
881 static void
882 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
883 {
884 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
885 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
886 	int i, block_size, blocks_per_int64;
887 
888 	/* Determine the block size */
889 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
890 		block_size = SHA256_HMAC_BLOCK_SIZE;
891 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
892 	} else {
893 		block_size = SHA512_HMAC_BLOCK_SIZE;
894 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
895 	}
896 
897 	(void) bzero(ipad, block_size);
898 	(void) bzero(opad, block_size);
899 	(void) bcopy(keyval, ipad, length_in_bytes);
900 	(void) bcopy(keyval, opad, length_in_bytes);
901 
902 	/* XOR key with ipad (0x36) and opad (0x5c) */
903 	for (i = 0; i < blocks_per_int64; i ++) {
904 		ipad[i] ^= 0x3636363636363636;
905 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
906 	}
907 
908 	/* perform SHA2 on ipad */
909 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
910 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
911 
912 	/* perform SHA2 on opad */
913 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
914 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
915 
916 }
917 
918 /*
919  */
920 static int
921 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
922     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
923     crypto_req_handle_t req)
924 {
925 	int ret = CRYPTO_SUCCESS;
926 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
927 	uint_t sha_digest_len, sha_hmac_block_size;
928 
929 	/*
930 	 * Set the digest length and block size to values appropriate to the
931 	 * mechanism
932 	 */
933 	switch (mechanism->cm_type) {
934 	case SHA256_HMAC_MECH_INFO_TYPE:
935 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
936 		sha_digest_len = SHA256_DIGEST_LENGTH;
937 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
938 		break;
939 	case SHA384_HMAC_MECH_INFO_TYPE:
940 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
941 	case SHA512_HMAC_MECH_INFO_TYPE:
942 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
943 		sha_digest_len = SHA512_DIGEST_LENGTH;
944 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
945 		break;
946 	default:
947 		return (CRYPTO_MECHANISM_INVALID);
948 	}
949 
950 	if (key->ck_format != CRYPTO_KEY_RAW)
951 		return (CRYPTO_ARGUMENTS_BAD);
952 
953 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
954 	    crypto_kmflag(req));
955 	if (ctx->cc_provider_private == NULL)
956 		return (CRYPTO_HOST_MEMORY);
957 
958 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
959 	if (ctx_template != NULL) {
960 		/* reuse context template */
961 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
962 		    sizeof (sha2_hmac_ctx_t));
963 	} else {
964 		/* no context template, compute context */
965 		if (keylen_in_bytes > sha_hmac_block_size) {
966 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
967 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
968 
969 			/*
970 			 * Hash the passed-in key to get a smaller key.
971 			 * The inner context is used since it hasn't been
972 			 * initialized yet.
973 			 */
974 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
975 			    &hmac_ctx->hc_icontext,
976 			    key->ck_data, keylen_in_bytes, digested_key);
977 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
978 			    digested_key, sha_digest_len);
979 		} else {
980 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
981 			    key->ck_data, keylen_in_bytes);
982 		}
983 	}
984 
985 	/*
986 	 * Get the mechanism parameters, if applicable.
987 	 */
988 	if (mechanism->cm_type % 3 == 2) {
989 		if (mechanism->cm_param == NULL ||
990 		    mechanism->cm_param_len != sizeof (ulong_t))
991 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
992 		PROV_SHA2_GET_DIGEST_LEN(mechanism,
993 		    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
994 		if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
995 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
996 	}
997 
998 	if (ret != CRYPTO_SUCCESS) {
999 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1000 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1001 		ctx->cc_provider_private = NULL;
1002 	}
1003 
1004 	return (ret);
1005 }
1006 
1007 /* ARGSUSED */
1008 static int
1009 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1010     crypto_req_handle_t req)
1011 {
1012 	int ret = CRYPTO_SUCCESS;
1013 
1014 	ASSERT(ctx->cc_provider_private != NULL);
1015 
1016 	/*
1017 	 * Do a SHA2 update of the inner context using the specified
1018 	 * data.
1019 	 */
1020 	switch (data->cd_format) {
1021 	case CRYPTO_DATA_RAW:
1022 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1023 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1024 		    data->cd_length);
1025 		break;
1026 	case CRYPTO_DATA_UIO:
1027 		ret = sha2_digest_update_uio(
1028 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1029 		break;
1030 	case CRYPTO_DATA_MBLK:
1031 		ret = sha2_digest_update_mblk(
1032 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1033 		break;
1034 	default:
1035 		ret = CRYPTO_ARGUMENTS_BAD;
1036 	}
1037 
1038 	return (ret);
1039 }
1040 
1041 /* ARGSUSED */
1042 static int
1043 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1044 {
1045 	int ret = CRYPTO_SUCCESS;
1046 	uchar_t digest[SHA512_DIGEST_LENGTH];
1047 	uint32_t digest_len, sha_digest_len;
1048 
1049 	ASSERT(ctx->cc_provider_private != NULL);
1050 
1051 	/* Set the digest lengths to values appropriate to the mechanism */
1052 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1053 	case SHA256_HMAC_MECH_INFO_TYPE:
1054 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1055 		break;
1056 	case SHA384_HMAC_MECH_INFO_TYPE:
1057 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1058 		break;
1059 	case SHA512_HMAC_MECH_INFO_TYPE:
1060 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1061 		break;
1062 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1063 		sha_digest_len = SHA256_DIGEST_LENGTH;
1064 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1065 		break;
1066 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1067 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1068 		sha_digest_len = SHA512_DIGEST_LENGTH;
1069 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1070 		break;
1071 	}
1072 
1073 	/*
1074 	 * We need to just return the length needed to store the output.
1075 	 * We should not destroy the context for the following cases.
1076 	 */
1077 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1078 		mac->cd_length = digest_len;
1079 		return (CRYPTO_BUFFER_TOO_SMALL);
1080 	}
1081 
1082 	/*
1083 	 * Do a SHA2 final on the inner context.
1084 	 */
1085 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1086 
1087 	/*
1088 	 * Do a SHA2 update on the outer context, feeding the inner
1089 	 * digest as data.
1090 	 */
1091 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1092 	    sha_digest_len);
1093 
1094 	/*
1095 	 * Do a SHA2 final on the outer context, storing the computing
1096 	 * digest in the users buffer.
1097 	 */
1098 	switch (mac->cd_format) {
1099 	case CRYPTO_DATA_RAW:
1100 		if (digest_len != sha_digest_len) {
1101 			/*
1102 			 * The caller requested a short digest. Digest
1103 			 * into a scratch buffer and return to
1104 			 * the user only what was requested.
1105 			 */
1106 			SHA2Final(digest,
1107 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1108 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1109 			    mac->cd_offset, digest_len);
1110 		} else {
1111 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1112 			    mac->cd_offset,
1113 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1114 		}
1115 		break;
1116 	case CRYPTO_DATA_UIO:
1117 		ret = sha2_digest_final_uio(
1118 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1119 		    digest_len, digest);
1120 		break;
1121 	case CRYPTO_DATA_MBLK:
1122 		ret = sha2_digest_final_mblk(
1123 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1124 		    digest_len, digest);
1125 		break;
1126 	default:
1127 		ret = CRYPTO_ARGUMENTS_BAD;
1128 	}
1129 
1130 	if (ret == CRYPTO_SUCCESS)
1131 		mac->cd_length = digest_len;
1132 	else
1133 		mac->cd_length = 0;
1134 
1135 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1136 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1137 	ctx->cc_provider_private = NULL;
1138 
1139 	return (ret);
1140 }
1141 
1142 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1143 	switch (data->cd_format) {					\
1144 	case CRYPTO_DATA_RAW:						\
1145 		SHA2Update(&(ctx).hc_icontext,				\
1146 		    (uint8_t *)data->cd_raw.iov_base +			\
1147 		    data->cd_offset, data->cd_length);			\
1148 		break;							\
1149 	case CRYPTO_DATA_UIO:						\
1150 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1151 		break;							\
1152 	case CRYPTO_DATA_MBLK:						\
1153 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1154 		    data);						\
1155 		break;							\
1156 	default:							\
1157 		ret = CRYPTO_ARGUMENTS_BAD;				\
1158 	}								\
1159 }
1160 
1161 /* ARGSUSED */
1162 static int
1163 sha2_mac_atomic(crypto_provider_handle_t provider,
1164     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1165     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1166     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1167 {
1168 	int ret = CRYPTO_SUCCESS;
1169 	uchar_t digest[SHA512_DIGEST_LENGTH];
1170 	sha2_hmac_ctx_t sha2_hmac_ctx;
1171 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1172 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1173 
1174 	/*
1175 	 * Set the digest length and block size to values appropriate to the
1176 	 * mechanism
1177 	 */
1178 	switch (mechanism->cm_type) {
1179 	case SHA256_HMAC_MECH_INFO_TYPE:
1180 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1181 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1182 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1183 		break;
1184 	case SHA384_HMAC_MECH_INFO_TYPE:
1185 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1186 	case SHA512_HMAC_MECH_INFO_TYPE:
1187 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1188 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1189 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1190 		break;
1191 	default:
1192 		return (CRYPTO_MECHANISM_INVALID);
1193 	}
1194 
1195 	/* Add support for key by attributes (RFE 4706552) */
1196 	if (key->ck_format != CRYPTO_KEY_RAW)
1197 		return (CRYPTO_ARGUMENTS_BAD);
1198 
1199 	if (ctx_template != NULL) {
1200 		/* reuse context template */
1201 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1202 	} else {
1203 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1204 		/* no context template, initialize context */
1205 		if (keylen_in_bytes > sha_hmac_block_size) {
1206 			/*
1207 			 * Hash the passed-in key to get a smaller key.
1208 			 * The inner context is used since it hasn't been
1209 			 * initialized yet.
1210 			 */
1211 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1212 			    &sha2_hmac_ctx.hc_icontext,
1213 			    key->ck_data, keylen_in_bytes, digest);
1214 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1215 			    sha_digest_len);
1216 		} else {
1217 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1218 			    keylen_in_bytes);
1219 		}
1220 	}
1221 
1222 	/* get the mechanism parameters, if applicable */
1223 	if ((mechanism->cm_type % 3) == 2) {
1224 		if (mechanism->cm_param == NULL ||
1225 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1226 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1227 			goto bail;
1228 		}
1229 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1230 		if (digest_len > sha_digest_len) {
1231 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1232 			goto bail;
1233 		}
1234 	}
1235 
1236 	/* do a SHA2 update of the inner context using the specified data */
1237 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1238 	if (ret != CRYPTO_SUCCESS)
1239 		/* the update failed, free context and bail */
1240 		goto bail;
1241 
1242 	/*
1243 	 * Do a SHA2 final on the inner context.
1244 	 */
1245 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1246 
1247 	/*
1248 	 * Do an SHA2 update on the outer context, feeding the inner
1249 	 * digest as data.
1250 	 *
1251 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1252 	 * bytes of the inner hash value.
1253 	 */
1254 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1255 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1256 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1257 		    SHA384_DIGEST_LENGTH);
1258 	else
1259 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1260 
1261 	/*
1262 	 * Do a SHA2 final on the outer context, storing the computed
1263 	 * digest in the users buffer.
1264 	 */
1265 	switch (mac->cd_format) {
1266 	case CRYPTO_DATA_RAW:
1267 		if (digest_len != sha_digest_len) {
1268 			/*
1269 			 * The caller requested a short digest. Digest
1270 			 * into a scratch buffer and return to
1271 			 * the user only what was requested.
1272 			 */
1273 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1274 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1275 			    mac->cd_offset, digest_len);
1276 		} else {
1277 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1278 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1279 		}
1280 		break;
1281 	case CRYPTO_DATA_UIO:
1282 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1283 		    digest_len, digest);
1284 		break;
1285 	case CRYPTO_DATA_MBLK:
1286 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1287 		    digest_len, digest);
1288 		break;
1289 	default:
1290 		ret = CRYPTO_ARGUMENTS_BAD;
1291 	}
1292 
1293 	if (ret == CRYPTO_SUCCESS) {
1294 		mac->cd_length = digest_len;
1295 		return (CRYPTO_SUCCESS);
1296 	}
1297 bail:
1298 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1299 	mac->cd_length = 0;
1300 	return (ret);
1301 }
1302 
1303 /* ARGSUSED */
1304 static int
1305 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1306     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1307     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1308     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1309 {
1310 	int ret = CRYPTO_SUCCESS;
1311 	uchar_t digest[SHA512_DIGEST_LENGTH];
1312 	sha2_hmac_ctx_t sha2_hmac_ctx;
1313 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1314 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1315 
1316 	/*
1317 	 * Set the digest length and block size to values appropriate to the
1318 	 * mechanism
1319 	 */
1320 	switch (mechanism->cm_type) {
1321 	case SHA256_HMAC_MECH_INFO_TYPE:
1322 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1323 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1324 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1325 		break;
1326 	case SHA384_HMAC_MECH_INFO_TYPE:
1327 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1328 	case SHA512_HMAC_MECH_INFO_TYPE:
1329 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1330 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1331 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1332 		break;
1333 	default:
1334 		return (CRYPTO_MECHANISM_INVALID);
1335 	}
1336 
1337 	/* Add support for key by attributes (RFE 4706552) */
1338 	if (key->ck_format != CRYPTO_KEY_RAW)
1339 		return (CRYPTO_ARGUMENTS_BAD);
1340 
1341 	if (ctx_template != NULL) {
1342 		/* reuse context template */
1343 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1344 	} else {
1345 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1346 		/* no context template, initialize context */
1347 		if (keylen_in_bytes > sha_hmac_block_size) {
1348 			/*
1349 			 * Hash the passed-in key to get a smaller key.
1350 			 * The inner context is used since it hasn't been
1351 			 * initialized yet.
1352 			 */
1353 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1354 			    &sha2_hmac_ctx.hc_icontext,
1355 			    key->ck_data, keylen_in_bytes, digest);
1356 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1357 			    sha_digest_len);
1358 		} else {
1359 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1360 			    keylen_in_bytes);
1361 		}
1362 	}
1363 
1364 	/* get the mechanism parameters, if applicable */
1365 	if (mechanism->cm_type % 3 == 2) {
1366 		if (mechanism->cm_param == NULL ||
1367 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1368 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1369 			goto bail;
1370 		}
1371 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1372 		if (digest_len > sha_digest_len) {
1373 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1374 			goto bail;
1375 		}
1376 	}
1377 
1378 	if (mac->cd_length != digest_len) {
1379 		ret = CRYPTO_INVALID_MAC;
1380 		goto bail;
1381 	}
1382 
1383 	/* do a SHA2 update of the inner context using the specified data */
1384 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1385 	if (ret != CRYPTO_SUCCESS)
1386 		/* the update failed, free context and bail */
1387 		goto bail;
1388 
1389 	/* do a SHA2 final on the inner context */
1390 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1391 
1392 	/*
1393 	 * Do an SHA2 update on the outer context, feeding the inner
1394 	 * digest as data.
1395 	 *
1396 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1397 	 * bytes of the inner hash value.
1398 	 */
1399 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1400 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1401 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1402 		    SHA384_DIGEST_LENGTH);
1403 	else
1404 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1405 
1406 	/*
1407 	 * Do a SHA2 final on the outer context, storing the computed
1408 	 * digest in the users buffer.
1409 	 */
1410 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1411 
1412 	/*
1413 	 * Compare the computed digest against the expected digest passed
1414 	 * as argument.
1415 	 */
1416 
1417 	switch (mac->cd_format) {
1418 
1419 	case CRYPTO_DATA_RAW:
1420 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1421 		    mac->cd_offset, digest_len) != 0)
1422 			ret = CRYPTO_INVALID_MAC;
1423 		break;
1424 
1425 	case CRYPTO_DATA_UIO: {
1426 		off_t offset = mac->cd_offset;
1427 		uint_t vec_idx;
1428 		off_t scratch_offset = 0;
1429 		size_t length = digest_len;
1430 		size_t cur_len;
1431 
1432 		/* we support only kernel buffer */
1433 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1434 			return (CRYPTO_ARGUMENTS_BAD);
1435 
1436 		/* jump to the first iovec containing the expected digest */
1437 		for (vec_idx = 0;
1438 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len &&
1439 		    vec_idx < mac->cd_uio->uio_iovcnt;
1440 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1441 			;
1442 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1443 			/*
1444 			 * The caller specified an offset that is
1445 			 * larger than the total size of the buffers
1446 			 * it provided.
1447 			 */
1448 			ret = CRYPTO_DATA_LEN_RANGE;
1449 			break;
1450 		}
1451 
1452 		/* do the comparison of computed digest vs specified one */
1453 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1454 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1455 			    offset, length);
1456 
1457 			if (bcmp(digest + scratch_offset,
1458 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1459 			    cur_len) != 0) {
1460 				ret = CRYPTO_INVALID_MAC;
1461 				break;
1462 			}
1463 
1464 			length -= cur_len;
1465 			vec_idx++;
1466 			scratch_offset += cur_len;
1467 			offset = 0;
1468 		}
1469 		break;
1470 	}
1471 
1472 	case CRYPTO_DATA_MBLK: {
1473 		off_t offset = mac->cd_offset;
1474 		mblk_t *mp;
1475 		off_t scratch_offset = 0;
1476 		size_t length = digest_len;
1477 		size_t cur_len;
1478 
1479 		/* jump to the first mblk_t containing the expected digest */
1480 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1481 		    offset -= MBLKL(mp), mp = mp->b_cont)
1482 			;
1483 		if (mp == NULL) {
1484 			/*
1485 			 * The caller specified an offset that is larger than
1486 			 * the total size of the buffers it provided.
1487 			 */
1488 			ret = CRYPTO_DATA_LEN_RANGE;
1489 			break;
1490 		}
1491 
1492 		while (mp != NULL && length > 0) {
1493 			cur_len = MIN(MBLKL(mp) - offset, length);
1494 			if (bcmp(digest + scratch_offset,
1495 			    mp->b_rptr + offset, cur_len) != 0) {
1496 				ret = CRYPTO_INVALID_MAC;
1497 				break;
1498 			}
1499 
1500 			length -= cur_len;
1501 			mp = mp->b_cont;
1502 			scratch_offset += cur_len;
1503 			offset = 0;
1504 		}
1505 		break;
1506 	}
1507 
1508 	default:
1509 		ret = CRYPTO_ARGUMENTS_BAD;
1510 	}
1511 
1512 	return (ret);
1513 bail:
1514 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1515 	mac->cd_length = 0;
1516 	return (ret);
1517 }
1518 
1519 /*
1520  * KCF software provider context management entry points.
1521  */
1522 
1523 /* ARGSUSED */
1524 static int
1525 sha2_create_ctx_template(crypto_provider_handle_t provider,
1526     crypto_mechanism_t *mechanism, crypto_key_t *key,
1527     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1528     crypto_req_handle_t req)
1529 {
1530 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1531 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1532 	uint32_t sha_digest_len, sha_hmac_block_size;
1533 
1534 	/*
1535 	 * Set the digest length and block size to values appropriate to the
1536 	 * mechanism
1537 	 */
1538 	switch (mechanism->cm_type) {
1539 	case SHA256_HMAC_MECH_INFO_TYPE:
1540 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1541 		sha_digest_len = SHA256_DIGEST_LENGTH;
1542 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1543 		break;
1544 	case SHA384_HMAC_MECH_INFO_TYPE:
1545 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1546 	case SHA512_HMAC_MECH_INFO_TYPE:
1547 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1548 		sha_digest_len = SHA512_DIGEST_LENGTH;
1549 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1550 		break;
1551 	default:
1552 		return (CRYPTO_MECHANISM_INVALID);
1553 	}
1554 
1555 	/* Add support for key by attributes (RFE 4706552) */
1556 	if (key->ck_format != CRYPTO_KEY_RAW)
1557 		return (CRYPTO_ARGUMENTS_BAD);
1558 
1559 	/*
1560 	 * Allocate and initialize SHA2 context.
1561 	 */
1562 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1563 	    crypto_kmflag(req));
1564 	if (sha2_hmac_ctx_tmpl == NULL)
1565 		return (CRYPTO_HOST_MEMORY);
1566 
1567 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1568 
1569 	if (keylen_in_bytes > sha_hmac_block_size) {
1570 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1571 
1572 		/*
1573 		 * Hash the passed-in key to get a smaller key.
1574 		 * The inner context is used since it hasn't been
1575 		 * initialized yet.
1576 		 */
1577 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1578 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1579 		    key->ck_data, keylen_in_bytes, digested_key);
1580 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1581 		    sha_digest_len);
1582 	} else {
1583 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1584 		    keylen_in_bytes);
1585 	}
1586 
1587 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1588 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1589 
1590 	return (CRYPTO_SUCCESS);
1591 }
1592 
1593 static int
1594 sha2_free_context(crypto_ctx_t *ctx)
1595 {
1596 	uint_t ctx_len;
1597 
1598 	if (ctx->cc_provider_private == NULL)
1599 		return (CRYPTO_SUCCESS);
1600 
1601 	/*
1602 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1603 	 * have different lengths.
1604 	 *
1605 	 * Note: Below is dependent on the mechanism ordering.
1606 	 */
1607 
1608 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1609 		ctx_len = sizeof (sha2_ctx_t);
1610 	else
1611 		ctx_len = sizeof (sha2_hmac_ctx_t);
1612 
1613 	bzero(ctx->cc_provider_private, ctx_len);
1614 	kmem_free(ctx->cc_provider_private, ctx_len);
1615 	ctx->cc_provider_private = NULL;
1616 
1617 	return (CRYPTO_SUCCESS);
1618 }
1619 
1620 /*
1621  * SHA-2 Power-Up Self-Test
1622  */
1623 void
1624 sha2_POST(int *rc)
1625 {
1626 
1627 	*rc = fips_sha2_post();
1628 
1629 }
1630