xref: /titanic_51/usr/src/uts/common/crypto/io/sha2_mod.c (revision 9e86db79b7d1bbc5f2f04e99954cbd5eae0e22bb)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/modctl.h>
28 #include <sys/cmn_err.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/strsun.h>
32 #include <sys/systm.h>
33 #include <sys/sysmacros.h>
34 #define	_SHA2_IMPL
35 #include <sys/sha2.h>
36 #include <sha2/sha2_impl.h>
37 
38 /*
39  * The sha2 module is created with two modlinkages:
40  * - a modlmisc that allows consumers to directly call the entry points
41  *   SHA2Init, SHA2Update, and SHA2Final.
42  * - a modlcrypto that allows the module to register with the Kernel
43  *   Cryptographic Framework (KCF) as a software provider for the SHA2
44  *   mechanisms.
45  */
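
/*
 * Illustrative sketch (not part of this module): a kernel consumer that
 * links against the misc/sha2 entry points can compute a one-shot SHA-256
 * digest directly, without going through KCF.  "buf" and "buflen" are
 * placeholders for the caller's data.
 *
 *	SHA2_CTX ctx;
 *	uint8_t digest[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);
 *	SHA2Update(&ctx, buf, buflen);
 *	SHA2Final(digest, &ctx);
 */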
46 
47 static struct modlmisc modlmisc = {
48 	&mod_miscops,
49 	"SHA2 Message-Digest Algorithm"
50 };
51 
52 static struct modlcrypto modlcrypto = {
53 	&mod_cryptoops,
54 	"SHA2 Kernel SW Provider"
55 };
56 
57 static struct modlinkage modlinkage = {
58 	MODREV_1, &modlmisc, &modlcrypto, NULL
59 };
60 
61 /*
62  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
63  * by KCF to one of the entry points.
64  */
65 
66 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
67 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
68 
69 /* to extract the digest length passed as a mechanism parameter */
70 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
71 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
72 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
73 	else {								\
74 		ulong_t tmp_ulong;					\
75 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
76 		(len) = (uint32_t)tmp_ulong;				\
77 	}								\
78 }
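
/*
 * Example (sketch): for the *_HMAC_GENERAL mechanisms, the caller passes the
 * desired MAC length as an ulong_t mechanism parameter, which the macro
 * above extracts; "mac_len" is a placeholder value.
 *
 *	ulong_t mac_len = 16;
 *	crypto_mechanism_t mech;
 *
 *	mech.cm_type = SHA256_HMAC_GEN_MECH_INFO_TYPE;
 *	mech.cm_param = (caddr_t)&mac_len;
 *	mech.cm_param_len = sizeof (mac_len);
 */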
79 
80 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
81 	SHA2Init(mech, ctx);				\
82 	SHA2Update(ctx, key, len);			\
83 	SHA2Final(digest, ctx);				\
84 }
85 
86 /*
87  * Mechanism info structure passed to KCF during registration.
88  */
89 static crypto_mech_info_t sha2_mech_info_tab[] = {
90 	/* SHA256 */
91 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
92 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
93 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
94 	/* SHA256-HMAC */
95 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
96 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
97 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
98 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 	/* SHA256-HMAC GENERAL */
100 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
101 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
102 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
103 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
104 	/* SHA384 */
105 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
106 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
107 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
108 	/* SHA384-HMAC */
109 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
110 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
111 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
112 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
113 	/* SHA384-HMAC GENERAL */
114 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
115 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
116 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
117 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
118 	/* SHA512 */
119 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
120 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
121 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 	/* SHA512-HMAC */
123 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
124 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
127 	/* SHA512-HMAC GENERAL */
128 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
129 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
130 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
131 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
132 };
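
/*
 * Example (sketch): a KCF consumer typically resolves these mechanism names
 * to framework-wide mechanism ids before submitting requests, e.g. with
 * crypto_mech2id(9F); error handling is elided.
 *
 *	crypto_mech_type_t mt;
 *
 *	mt = crypto_mech2id(SUN_CKM_SHA256);
 *	if (mt == CRYPTO_MECH_INVALID)
 *		return (ENOTSUP);
 */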
133 
134 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
135 
136 static crypto_control_ops_t sha2_control_ops = {
137 	sha2_provider_status
138 };
139 
140 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
141     crypto_req_handle_t);
142 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
143     crypto_req_handle_t);
144 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
145     crypto_req_handle_t);
146 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
147     crypto_req_handle_t);
148 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
149     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
150     crypto_req_handle_t);
151 
152 static crypto_digest_ops_t sha2_digest_ops = {
153 	sha2_digest_init,
154 	sha2_digest,
155 	sha2_digest_update,
156 	NULL,
157 	sha2_digest_final,
158 	sha2_digest_atomic
159 };
160 
161 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
162     crypto_spi_ctx_template_t, crypto_req_handle_t);
163 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
164     crypto_req_handle_t);
165 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
166 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
167     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
168     crypto_spi_ctx_template_t, crypto_req_handle_t);
169 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
170     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
171     crypto_spi_ctx_template_t, crypto_req_handle_t);
172 
173 static crypto_mac_ops_t sha2_mac_ops = {
174 	sha2_mac_init,
175 	NULL,
176 	sha2_mac_update,
177 	sha2_mac_final,
178 	sha2_mac_atomic,
179 	sha2_mac_verify_atomic
180 };
181 
182 static int sha2_create_ctx_template(crypto_provider_handle_t,
183     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
184     size_t *, crypto_req_handle_t);
185 static int sha2_free_context(crypto_ctx_t *);
186 
187 static crypto_ctx_ops_t sha2_ctx_ops = {
188 	sha2_create_ctx_template,
189 	sha2_free_context
190 };
191 
192 static crypto_ops_t sha2_crypto_ops = {
193 	&sha2_control_ops,
194 	&sha2_digest_ops,
195 	NULL,
196 	&sha2_mac_ops,
197 	NULL,
198 	NULL,
199 	NULL,
200 	NULL,
201 	NULL,
202 	NULL,
203 	NULL,
204 	NULL,
205 	NULL,
206 	&sha2_ctx_ops
207 };
208 
209 static crypto_provider_info_t sha2_prov_info = {
210 	CRYPTO_SPI_VERSION_1,
211 	"SHA2 Software Provider",
212 	CRYPTO_SW_PROVIDER,
213 	{&modlinkage},
214 	NULL,
215 	&sha2_crypto_ops,
216 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
217 	sha2_mech_info_tab
218 };
219 
220 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
221 
222 int
223 _init()
224 {
225 	int ret;
226 
227 	if ((ret = mod_install(&modlinkage)) != 0)
228 		return (ret);
229 
230 	/*
231 	 * Register with KCF. If the registration fails, log an
232 	 * error but do not uninstall the module, since the functionality
233 	 * provided by misc/sha2 should still be available.
234 	 */
235 	if ((ret = crypto_register_provider(&sha2_prov_info,
236 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
237 		cmn_err(CE_WARN, "sha2 _init: "
238 		    "crypto_register_provider() failed (0x%x)", ret);
239 
240 	return (0);
241 }
242 
243 int
244 _info(struct modinfo *modinfop)
245 {
246 	return (mod_info(&modlinkage, modinfop));
247 }
248 
249 /*
250  * KCF software provider control entry points.
251  */
252 /* ARGSUSED */
253 static void
254 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
255 {
256 	*status = CRYPTO_PROVIDER_READY;
257 }
258 
259 /*
260  * KCF software provider digest entry points.
261  */
262 
263 static int
264 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
265     crypto_req_handle_t req)
266 {
267 
268 	/*
269 	 * Allocate and initialize SHA2 context.
270 	 */
271 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
272 	    crypto_kmflag(req));
273 	if (ctx->cc_provider_private == NULL)
274 		return (CRYPTO_HOST_MEMORY);
275 
276 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
277 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
278 
279 	return (CRYPTO_SUCCESS);
280 }
281 
282 /*
283  * Helper SHA2 digest update function for uio data.
284  */
285 static int
286 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
287 {
288 	off_t offset = data->cd_offset;
289 	size_t length = data->cd_length;
290 	uint_t vec_idx;
291 	size_t cur_len;
292 
293 	/* we support only kernel buffers */
294 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
295 		return (CRYPTO_ARGUMENTS_BAD);
296 
297 	/*
298 	 * Jump to the first iovec containing data to be
299 	 * digested.
300 	 */
301 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
302 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
303 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
304 		;
305 	if (vec_idx == data->cd_uio->uio_iovcnt) {
306 		/*
307 		 * The caller specified an offset that is larger than the
308 		 * total size of the buffers it provided.
309 		 */
310 		return (CRYPTO_DATA_LEN_RANGE);
311 	}
312 
313 	/*
314 	 * Now do the digesting on the iovecs.
315 	 */
316 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
317 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
318 		    offset, length);
319 
320 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
321 		    uio_iov[vec_idx].iov_base + offset, cur_len);
322 		length -= cur_len;
323 		vec_idx++;
324 		offset = 0;
325 	}
326 
327 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
328 		/*
329 		 * The end of the specified iovecs was reached but
330 		 * the length requested could not be processed, i.e.
331 		 * the caller requested to digest more data than it provided.
332 		 */
333 		return (CRYPTO_DATA_LEN_RANGE);
334 	}
335 
336 	return (CRYPTO_SUCCESS);
337 }
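
/*
 * Example (sketch): the uio form of crypto_data_t that the helper above
 * walks; two kernel buffers "buf1"/"buf2" with lengths "len1"/"len2"
 * (placeholders) are presented as one logical stream.
 *
 *	iovec_t iov[2];
 *	uio_t uio;
 *	crypto_data_t data;
 *
 *	iov[0].iov_base = (caddr_t)buf1;
 *	iov[0].iov_len = len1;
 *	iov[1].iov_base = (caddr_t)buf2;
 *	iov[1].iov_len = len2;
 *	uio.uio_iov = iov;
 *	uio.uio_iovcnt = 2;
 *	uio.uio_segflg = UIO_SYSSPACE;
 *
 *	data.cd_format = CRYPTO_DATA_UIO;
 *	data.cd_offset = 0;
 *	data.cd_length = len1 + len2;
 *	data.cd_uio = &uio;
 */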
338 
339 /*
340  * Helper SHA2 digest final function for uio data.
341  * digest_len is the length of the desired digest. If digest_len
342  * is smaller than the default SHA2 digest length, the caller
343  * must pass a scratch buffer, digest_scratch, which must
344  * be at least the algorithm's digest length bytes.
345  */
346 static int
347 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
348     ulong_t digest_len, uchar_t *digest_scratch)
349 {
350 	off_t offset = digest->cd_offset;
351 	uint_t vec_idx;
352 
353 	/* we support only kernel buffers */
354 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
355 		return (CRYPTO_ARGUMENTS_BAD);
356 
357 	/*
358 	 * Jump to the first iovec containing ptr to the digest to
359 	 * be returned.
360 	 */
361 	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
362 	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
363 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
364 		;
365 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
366 		/*
367 		 * The caller specified an offset that is
368 		 * larger than the total size of the buffers
369 		 * it provided.
370 		 */
371 		return (CRYPTO_DATA_LEN_RANGE);
372 	}
373 
374 	if (offset + digest_len <=
375 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
376 		/*
377 		 * The computed SHA2 digest will fit in the current
378 		 * iovec.
379 		 */
380 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
381 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
382 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
383 		    (digest_len != SHA512_DIGEST_LENGTH))) {
384 			/*
385 			 * The caller requested a short digest. Digest
386 			 * into a scratch buffer and return to
387 			 * the user only what was requested.
388 			 */
389 			SHA2Final(digest_scratch, sha2_ctx);
390 
391 			bcopy(digest_scratch, (uchar_t *)digest->
392 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
393 			    digest_len);
394 		} else {
395 			SHA2Final((uchar_t *)digest->
396 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
397 			    sha2_ctx);
398 
399 		}
400 	} else {
401 		/*
402 		 * The computed digest will cross one or more iovecs.
403 		 * This is bad performance-wise but we need to support it.
404 		 * Allocate a small scratch buffer on the stack and
405 		 * copy it piecemeal to the specified digest iovecs.
406 		 */
407 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
408 		off_t scratch_offset = 0;
409 		size_t length = digest_len;
410 		size_t cur_len;
411 
412 		SHA2Final(digest_tmp, sha2_ctx);
413 
414 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
415 			cur_len =
416 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
417 			    offset, length);
418 			bcopy(digest_tmp + scratch_offset,
419 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
420 			    cur_len);
421 
422 			length -= cur_len;
423 			vec_idx++;
424 			scratch_offset += cur_len;
425 			offset = 0;
426 		}
427 
428 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
429 			/*
430 			 * The end of the specified iovecs was reached but
431 			 * the length requested could not be processed, i.e.
432 			 * the caller requested to digest more data than it
433 			 * provided.
434 			 */
435 			return (CRYPTO_DATA_LEN_RANGE);
436 		}
437 	}
438 
439 	return (CRYPTO_SUCCESS);
440 }
441 
442 /*
443  * Helper SHA2 digest update function for mblk data.
444  */
445 static int
446 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
447 {
448 	off_t offset = data->cd_offset;
449 	size_t length = data->cd_length;
450 	mblk_t *mp;
451 	size_t cur_len;
452 
453 	/*
454 	 * Jump to the first mblk_t containing data to be digested.
455 	 */
456 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
457 	    offset -= MBLKL(mp), mp = mp->b_cont)
458 		;
459 	if (mp == NULL) {
460 		/*
461 		 * The caller specified an offset that is larger than the
462 		 * total size of the buffers it provided.
463 		 */
464 		return (CRYPTO_DATA_LEN_RANGE);
465 	}
466 
467 	/*
468 	 * Now do the digesting on the mblk chain.
469 	 */
470 	while (mp != NULL && length > 0) {
471 		cur_len = MIN(MBLKL(mp) - offset, length);
472 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
473 		length -= cur_len;
474 		offset = 0;
475 		mp = mp->b_cont;
476 	}
477 
478 	if (mp == NULL && length > 0) {
479 		/*
480 		 * The end of the mblk was reached but the length requested
481 		 * could not be processed, i.e. the caller requested
482 		 * to digest more data than it provided.
483 		 */
484 		return (CRYPTO_DATA_LEN_RANGE);
485 	}
486 
487 	return (CRYPTO_SUCCESS);
488 }
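
/*
 * Example (sketch): the mblk form of crypto_data_t that the helper above
 * walks; "mp" (a placeholder) is an existing STREAMS message chain.
 *
 *	crypto_data_t data;
 *
 *	data.cd_format = CRYPTO_DATA_MBLK;
 *	data.cd_offset = 0;
 *	data.cd_length = msgdsize(mp);
 *	data.cd_mp = mp;
 */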
489 
490 /*
491  * Helper SHA2 digest final function for mblk data.
492  * digest_len is the length of the desired digest. If digest_len
493  * is smaller than the default SHA2 digest length, the caller
494  * must pass a scratch buffer, digest_scratch, which must
495  * be at least the algorithm's digest length bytes.
496  */
497 static int
498 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
499     ulong_t digest_len, uchar_t *digest_scratch)
500 {
501 	off_t offset = digest->cd_offset;
502 	mblk_t *mp;
503 
504 	/*
505 	 * Jump to the first mblk_t that will be used to store the digest.
506 	 */
507 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
508 	    offset -= MBLKL(mp), mp = mp->b_cont)
509 		;
510 	if (mp == NULL) {
511 		/*
512 		 * The caller specified an offset that is larger than the
513 		 * total size of the buffers it provided.
514 		 */
515 		return (CRYPTO_DATA_LEN_RANGE);
516 	}
517 
518 	if (offset + digest_len <= MBLKL(mp)) {
519 		/*
520 		 * The computed SHA2 digest will fit in the current mblk.
521 		 * Do the SHA2Final() in-place.
522 		 */
523 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
524 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
525 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
526 		    (digest_len != SHA512_DIGEST_LENGTH))) {
527 			/*
528 			 * The caller requested a short digest. Digest
529 			 * into a scratch buffer and return to
530 			 * the user only what was requested.
531 			 */
532 			SHA2Final(digest_scratch, sha2_ctx);
533 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
534 		} else {
535 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
536 		}
537 	} else {
538 		/*
539 		 * The computed digest will cross one or more mblks.
540 		 * This is bad performance-wise but we need to support it.
541 		 * Allocate a small scratch buffer on the stack and
542 		 * copy it piecemeal to the specified digest mblks.
543 		 */
544 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
545 		off_t scratch_offset = 0;
546 		size_t length = digest_len;
547 		size_t cur_len;
548 
549 		SHA2Final(digest_tmp, sha2_ctx);
550 
551 		while (mp != NULL && length > 0) {
552 			cur_len = MIN(MBLKL(mp) - offset, length);
553 			bcopy(digest_tmp + scratch_offset,
554 			    mp->b_rptr + offset, cur_len);
555 
556 			length -= cur_len;
557 			mp = mp->b_cont;
558 			scratch_offset += cur_len;
559 			offset = 0;
560 		}
561 
562 		if (mp == NULL && length > 0) {
563 			/*
564 			 * The end of the specified mblk was reached but
565 			 * the length requested could not be processed, i.e.
566 			 * the caller requested to digest more data than it
567 			 * provided.
568 			 */
569 			return (CRYPTO_DATA_LEN_RANGE);
570 		}
571 	}
572 
573 	return (CRYPTO_SUCCESS);
574 }
575 
576 /* ARGSUSED */
577 static int
578 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
579     crypto_req_handle_t req)
580 {
581 	int ret = CRYPTO_SUCCESS;
582 	uint_t sha_digest_len;
583 
584 	ASSERT(ctx->cc_provider_private != NULL);
585 
586 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
587 	case SHA256_MECH_INFO_TYPE:
588 		sha_digest_len = SHA256_DIGEST_LENGTH;
589 		break;
590 	case SHA384_MECH_INFO_TYPE:
591 		sha_digest_len = SHA384_DIGEST_LENGTH;
592 		break;
593 	case SHA512_MECH_INFO_TYPE:
594 		sha_digest_len = SHA512_DIGEST_LENGTH;
595 		break;
596 	default:
597 		return (CRYPTO_MECHANISM_INVALID);
598 	}
599 
600 	/*
601 	 * If the caller is only querying for the output length, or the
602 	 * supplied buffer is too small, just return the length needed to
	 * store the output and do not destroy the context.
603 	 */
604 	if ((digest->cd_length == 0) ||
605 	    (digest->cd_length < sha_digest_len)) {
606 		digest->cd_length = sha_digest_len;
607 		return (CRYPTO_BUFFER_TOO_SMALL);
608 	}
609 
610 	/*
611 	 * Do the SHA2 update on the specified input data.
612 	 */
613 	switch (data->cd_format) {
614 	case CRYPTO_DATA_RAW:
615 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
616 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
617 		    data->cd_length);
618 		break;
619 	case CRYPTO_DATA_UIO:
620 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
621 		    data);
622 		break;
623 	case CRYPTO_DATA_MBLK:
624 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
625 		    data);
626 		break;
627 	default:
628 		ret = CRYPTO_ARGUMENTS_BAD;
629 	}
630 
631 	if (ret != CRYPTO_SUCCESS) {
632 		/* the update failed, free context and bail */
633 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
634 		ctx->cc_provider_private = NULL;
635 		digest->cd_length = 0;
636 		return (ret);
637 	}
638 
639 	/*
640 	 * Do a SHA2 final; this must be done separately since the digest
641 	 * data type can differ from the input data type.
642 	 */
643 	switch (digest->cd_format) {
644 	case CRYPTO_DATA_RAW:
645 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
646 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
647 		break;
648 	case CRYPTO_DATA_UIO:
649 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
650 		    digest, sha_digest_len, NULL);
651 		break;
652 	case CRYPTO_DATA_MBLK:
653 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
654 		    digest, sha_digest_len, NULL);
655 		break;
656 	default:
657 		ret = CRYPTO_ARGUMENTS_BAD;
658 	}
659 
660 	/* all done, free context and return */
661 
662 	if (ret == CRYPTO_SUCCESS)
663 		digest->cd_length = sha_digest_len;
664 	else
665 		digest->cd_length = 0;
666 
667 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
668 	ctx->cc_provider_private = NULL;
669 	return (ret);
670 }
671 
672 /* ARGSUSED */
673 static int
674 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
675     crypto_req_handle_t req)
676 {
677 	int ret = CRYPTO_SUCCESS;
678 
679 	ASSERT(ctx->cc_provider_private != NULL);
680 
681 	/*
682 	 * Do the SHA2 update on the specified input data.
683 	 */
684 	switch (data->cd_format) {
685 	case CRYPTO_DATA_RAW:
686 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
687 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
688 		    data->cd_length);
689 		break;
690 	case CRYPTO_DATA_UIO:
691 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
692 		    data);
693 		break;
694 	case CRYPTO_DATA_MBLK:
695 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
696 		    data);
697 		break;
698 	default:
699 		ret = CRYPTO_ARGUMENTS_BAD;
700 	}
701 
702 	return (ret);
703 }
704 
705 /* ARGSUSED */
706 static int
707 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
708     crypto_req_handle_t req)
709 {
710 	int ret = CRYPTO_SUCCESS;
711 	uint_t sha_digest_len;
712 
713 	ASSERT(ctx->cc_provider_private != NULL);
714 
715 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
716 	case SHA256_MECH_INFO_TYPE:
717 		sha_digest_len = SHA256_DIGEST_LENGTH;
718 		break;
719 	case SHA384_MECH_INFO_TYPE:
720 		sha_digest_len = SHA384_DIGEST_LENGTH;
721 		break;
722 	case SHA512_MECH_INFO_TYPE:
723 		sha_digest_len = SHA512_DIGEST_LENGTH;
724 		break;
725 	default:
726 		return (CRYPTO_MECHANISM_INVALID);
727 	}
728 
729 	/*
730 	 * If the caller is only querying for the output length, or the
731 	 * supplied buffer is too small, just return the length needed to
	 * store the output and do not destroy the context.
732 	 */
733 	if ((digest->cd_length == 0) ||
734 	    (digest->cd_length < sha_digest_len)) {
735 		digest->cd_length = sha_digest_len;
736 		return (CRYPTO_BUFFER_TOO_SMALL);
737 	}
738 
739 	/*
740 	 * Do a SHA2 final.
741 	 */
742 	switch (digest->cd_format) {
743 	case CRYPTO_DATA_RAW:
744 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
745 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
746 		break;
747 	case CRYPTO_DATA_UIO:
748 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
749 		    digest, sha_digest_len, NULL);
750 		break;
751 	case CRYPTO_DATA_MBLK:
752 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
753 		    digest, sha_digest_len, NULL);
754 		break;
755 	default:
756 		ret = CRYPTO_ARGUMENTS_BAD;
757 	}
758 
759 	/* all done, free context and return */
760 
761 	if (ret == CRYPTO_SUCCESS)
762 		digest->cd_length = sha_digest_len;
763 	else
764 		digest->cd_length = 0;
765 
766 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
767 	ctx->cc_provider_private = NULL;
768 
769 	return (ret);
770 }
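
/*
 * Example (sketch): the multi-part sequence KCF drives through the digest
 * ops above.  A CRYPTO_BUFFER_TOO_SMALL return from the final leaves the
 * context intact, with digest.cd_length set to the required size, so the
 * caller can supply a bigger buffer and retry the final.  "ctx", "mech",
 * "chunk1", "chunk2", "digest" and "req" are placeholders.
 *
 *	(void) sha2_digest_init(ctx, &mech, req);
 *	(void) sha2_digest_update(ctx, &chunk1, req);
 *	(void) sha2_digest_update(ctx, &chunk2, req);
 *	ret = sha2_digest_final(ctx, &digest, req);
 */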
771 
772 /* ARGSUSED */
773 static int
774 sha2_digest_atomic(crypto_provider_handle_t provider,
775     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
776     crypto_data_t *data, crypto_data_t *digest,
777     crypto_req_handle_t req)
778 {
779 	int ret = CRYPTO_SUCCESS;
780 	SHA2_CTX sha2_ctx;
781 	uint32_t sha_digest_len;
782 
783 	/*
784 	 * Do the SHA inits.
785 	 */
786 
787 	SHA2Init(mechanism->cm_type, &sha2_ctx);
788 
	/*
	 * Do the SHA updates on the specified input data.
	 */
789 	switch (data->cd_format) {
790 	case CRYPTO_DATA_RAW:
791 		SHA2Update(&sha2_ctx, (uint8_t *)data->
792 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
793 		break;
794 	case CRYPTO_DATA_UIO:
795 		ret = sha2_digest_update_uio(&sha2_ctx, data);
796 		break;
797 	case CRYPTO_DATA_MBLK:
798 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
799 		break;
800 	default:
801 		ret = CRYPTO_ARGUMENTS_BAD;
802 	}
803 
808 	if (ret != CRYPTO_SUCCESS) {
809 		/* the update failed, bail */
810 		digest->cd_length = 0;
811 		return (ret);
812 	}
813 
814 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
815 		sha_digest_len = SHA256_DIGEST_LENGTH;
816 	else
817 		sha_digest_len = SHA512_DIGEST_LENGTH;
818 
819 	/*
820 	 * Do a SHA2 final; this must be done separately since the digest
821 	 * data type can differ from the input data type.
822 	 */
823 	switch (digest->cd_format) {
824 	case CRYPTO_DATA_RAW:
825 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
826 		    digest->cd_offset, &sha2_ctx);
827 		break;
828 	case CRYPTO_DATA_UIO:
829 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
830 		    sha_digest_len, NULL);
831 		break;
832 	case CRYPTO_DATA_MBLK:
833 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
834 		    sha_digest_len, NULL);
835 		break;
836 	default:
837 		ret = CRYPTO_ARGUMENTS_BAD;
838 	}
839 
840 	if (ret == CRYPTO_SUCCESS)
841 		digest->cd_length = sha_digest_len;
842 	else
843 		digest->cd_length = 0;
844 
845 	return (ret);
846 }
847 
848 /*
849  * KCF software provider mac entry points.
850  *
851  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
852  *
853  * Init:
854  * The initialization routine initializes what we denote
855  * as the inner and outer contexts by doing
856  * - for inner context: SHA2(key XOR ipad)
857  * - for outer context: SHA2(key XOR opad)
858  *
859  * Update:
860  * Each subsequent SHA2 HMAC update will result in an
861  * update of the inner context with the specified data.
862  *
863  * Final:
864  * The SHA2 HMAC final will do a SHA2 final operation on the
865  * inner context, and the resulting digest will be used
866  * as the data for an update on the outer context. Last
867  * but not least, a SHA2 final on the outer context will
868  * be performed to obtain the SHA2 HMAC digest to return
869  * to the user.
870  */
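
/*
 * Illustrative sketch of the scheme above in terms of the misc/sha2 entry
 * points (one-shot, SHA-256, key no longer than one block); "key", "keylen",
 * "text" and "textlen" are placeholders.
 *
 *	SHA2_CTX ictx, octx;
 *	uint8_t k_ipad[SHA256_HMAC_BLOCK_SIZE];
 *	uint8_t k_opad[SHA256_HMAC_BLOCK_SIZE];
 *	uint8_t inner[SHA256_DIGEST_LENGTH], mac[SHA256_DIGEST_LENGTH];
 *	int i;
 *
 *	bzero(k_ipad, sizeof (k_ipad));
 *	bzero(k_opad, sizeof (k_opad));
 *	bcopy(key, k_ipad, keylen);
 *	bcopy(key, k_opad, keylen);
 *	for (i = 0; i < SHA256_HMAC_BLOCK_SIZE; i++) {
 *		k_ipad[i] ^= 0x36;
 *		k_opad[i] ^= 0x5c;
 *	}
 *
 *	SHA2Init(SHA256_MECH_INFO_TYPE, &ictx);
 *	SHA2Update(&ictx, k_ipad, sizeof (k_ipad));
 *	SHA2Update(&ictx, text, textlen);
 *	SHA2Final(inner, &ictx);
 *
 *	SHA2Init(SHA256_MECH_INFO_TYPE, &octx);
 *	SHA2Update(&octx, k_opad, sizeof (k_opad));
 *	SHA2Update(&octx, inner, sizeof (inner));
 *	SHA2Final(mac, &octx);
 */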
871 
872 /*
873  * Initialize a SHA2-HMAC context.
874  */
875 static void
876 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
877 {
878 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
879 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
880 	int i, block_size, blocks_per_int64;
881 
882 	/* Determine the block size */
883 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
884 		block_size = SHA256_HMAC_BLOCK_SIZE;
885 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
886 	} else {
887 		block_size = SHA512_HMAC_BLOCK_SIZE;
888 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
889 	}
890 
891 	(void) bzero(ipad, block_size);
892 	(void) bzero(opad, block_size);
893 	(void) bcopy(keyval, ipad, length_in_bytes);
894 	(void) bcopy(keyval, opad, length_in_bytes);
895 
896 	/* XOR key with ipad (0x36) and opad (0x5c) */
897 	for (i = 0; i < blocks_per_int64; i++) {
898 		ipad[i] ^= 0x3636363636363636;
899 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
900 	}
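
	/*
	 * For example, SHA-256 has a 64-byte HMAC block, so the loop above
	 * XORs 64 / 8 = 8 64-bit words; the 128-byte SHA-384/512 block uses
	 * all 16 words of ipad[]/opad[].
	 */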
901 
902 	/* perform SHA2 on ipad */
903 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
904 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
905 
906 	/* perform SHA2 on opad */
907 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
908 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
909 
910 }
911 
912 /*
913  * Initialize a SHA2-HMAC context for a KCF MAC operation.
 */
914 static int
915 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
916     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
917     crypto_req_handle_t req)
918 {
919 	int ret = CRYPTO_SUCCESS;
920 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
921 	uint_t sha_digest_len, sha_hmac_block_size;
922 
923 	/*
924 	 * Set the digest length and block size to values appropriate to the
925 	 * mechanism.
926 	 */
927 	switch (mechanism->cm_type) {
928 	case SHA256_HMAC_MECH_INFO_TYPE:
929 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
930 		sha_digest_len = SHA256_DIGEST_LENGTH;
931 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
932 		break;
933 	case SHA384_HMAC_MECH_INFO_TYPE:
934 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
935 	case SHA512_HMAC_MECH_INFO_TYPE:
936 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
937 		sha_digest_len = SHA512_DIGEST_LENGTH;
938 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
939 		break;
940 	default:
941 		return (CRYPTO_MECHANISM_INVALID);
942 	}
943 
944 	if (key->ck_format != CRYPTO_KEY_RAW)
945 		return (CRYPTO_ARGUMENTS_BAD);
946 
947 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
948 	    crypto_kmflag(req));
949 	if (ctx->cc_provider_private == NULL)
950 		return (CRYPTO_HOST_MEMORY);
951 
952 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
953 	if (ctx_template != NULL) {
954 		/* reuse context template */
955 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
956 		    sizeof (sha2_hmac_ctx_t));
957 	} else {
958 		/* no context template, compute context */
959 		if (keylen_in_bytes > sha_hmac_block_size) {
960 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
961 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
962 
963 			/*
964 			 * Hash the passed-in key to get a smaller key.
965 			 * The inner context is used since it hasn't been
966 			 * initialized yet.
967 			 */
968 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
969 			    &hmac_ctx->hc_icontext,
970 			    key->ck_data, keylen_in_bytes, digested_key);
971 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
972 			    digested_key, sha_digest_len);
973 		} else {
974 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
975 			    key->ck_data, keylen_in_bytes);
976 		}
977 	}
978 
979 	/*
980 	 * Get the mechanism parameters, if applicable.
981 	 */
982 	if (mechanism->cm_type % 3 == 2) {
983 		if (mechanism->cm_param == NULL ||
984 		    mechanism->cm_param_len != sizeof (ulong_t)) {
985 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
986 			PROV_SHA2_GET_DIGEST_LEN(mechanism,
987 			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
988 			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
989 				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
990 	}
991 
992 	if (ret != CRYPTO_SUCCESS) {
993 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
994 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
995 		ctx->cc_provider_private = NULL;
996 	}
997 
998 	return (ret);
999 }
1000 
1001 /* ARGSUSED */
1002 static int
1003 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1004     crypto_req_handle_t req)
1005 {
1006 	int ret = CRYPTO_SUCCESS;
1007 
1008 	ASSERT(ctx->cc_provider_private != NULL);
1009 
1010 	/*
1011 	 * Do a SHA2 update of the inner context using the specified
1012 	 * data.
1013 	 */
1014 	switch (data->cd_format) {
1015 	case CRYPTO_DATA_RAW:
1016 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1017 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1018 		    data->cd_length);
1019 		break;
1020 	case CRYPTO_DATA_UIO:
1021 		ret = sha2_digest_update_uio(
1022 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1023 		break;
1024 	case CRYPTO_DATA_MBLK:
1025 		ret = sha2_digest_update_mblk(
1026 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1027 		break;
1028 	default:
1029 		ret = CRYPTO_ARGUMENTS_BAD;
1030 	}
1031 
1032 	return (ret);
1033 }
1034 
1035 /* ARGSUSED */
1036 static int
1037 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1038 {
1039 	int ret = CRYPTO_SUCCESS;
1040 	uchar_t digest[SHA512_DIGEST_LENGTH];
1041 	uint32_t digest_len, sha_digest_len;
1042 
1043 	ASSERT(ctx->cc_provider_private != NULL);
1044 
1045 	/* Set the digest lengths to values appropriate to the mechanism */
1046 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1047 	case SHA256_HMAC_MECH_INFO_TYPE:
1048 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1049 		break;
1050 	case SHA384_HMAC_MECH_INFO_TYPE:
1051 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1052 		break;
1053 	case SHA512_HMAC_MECH_INFO_TYPE:
1054 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1055 		break;
1056 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1057 		sha_digest_len = SHA256_DIGEST_LENGTH;
1058 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1059 		break;
1060 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
		/* the outer hash takes only the 48-byte inner digest */
		sha_digest_len = SHA384_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
1061 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1062 		sha_digest_len = SHA512_DIGEST_LENGTH;
1063 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1064 		break;
1065 	}
1066 
1067 	/*
1068 	 * If the caller is only querying for the output length, or the
1069 	 * supplied buffer is too small, just return the length needed to
	 * store the output and do not destroy the context.
1070 	 */
1071 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1072 		mac->cd_length = digest_len;
1073 		return (CRYPTO_BUFFER_TOO_SMALL);
1074 	}
1075 
1076 	/*
1077 	 * Do a SHA2 final on the inner context.
1078 	 */
1079 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1080 
1081 	/*
1082 	 * Do a SHA2 update on the outer context, feeding the inner
1083 	 * digest as data.
1084 	 */
1085 	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1086 	    sha_digest_len);
1087 
1088 	/*
1089 	 * Do a SHA2 final on the outer context, storing the computed
1090 	 * digest in the user's buffer.
1091 	 */
1092 	switch (mac->cd_format) {
1093 	case CRYPTO_DATA_RAW:
1094 		if (digest_len != sha_digest_len) {
1095 			/*
1096 			 * The caller requested a short digest. Digest
1097 			 * into a scratch buffer and return to
1098 			 * the user only what was requested.
1099 			 */
1100 			SHA2Final(digest,
1101 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1102 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1103 			    mac->cd_offset, digest_len);
1104 		} else {
1105 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1106 			    mac->cd_offset,
1107 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1108 		}
1109 		break;
1110 	case CRYPTO_DATA_UIO:
1111 		ret = sha2_digest_final_uio(
1112 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1113 		    digest_len, digest);
1114 		break;
1115 	case CRYPTO_DATA_MBLK:
1116 		ret = sha2_digest_final_mblk(
1117 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1118 		    digest_len, digest);
1119 		break;
1120 	default:
1121 		ret = CRYPTO_ARGUMENTS_BAD;
1122 	}
1123 
1124 	if (ret == CRYPTO_SUCCESS)
1125 		mac->cd_length = digest_len;
1126 	else
1127 		mac->cd_length = 0;
1128 
1129 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1130 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1131 	ctx->cc_provider_private = NULL;
1132 
1133 	return (ret);
1134 }
1135 
1136 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1137 	switch (data->cd_format) {					\
1138 	case CRYPTO_DATA_RAW:						\
1139 		SHA2Update(&(ctx).hc_icontext,				\
1140 		    (uint8_t *)data->cd_raw.iov_base +			\
1141 		    data->cd_offset, data->cd_length);			\
1142 		break;							\
1143 	case CRYPTO_DATA_UIO:						\
1144 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1145 		break;							\
1146 	case CRYPTO_DATA_MBLK:						\
1147 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1148 		    data);						\
1149 		break;							\
1150 	default:							\
1151 		ret = CRYPTO_ARGUMENTS_BAD;				\
1152 	}								\
1153 }
1154 
1155 /* ARGSUSED */
1156 static int
1157 sha2_mac_atomic(crypto_provider_handle_t provider,
1158     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1159     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1160     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1161 {
1162 	int ret = CRYPTO_SUCCESS;
1163 	uchar_t digest[SHA512_DIGEST_LENGTH];
1164 	sha2_hmac_ctx_t sha2_hmac_ctx;
1165 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1166 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1167 
1168 	/*
1169 	 * Set the digest length and block size to values appropriate to the
1170 	 * mechanism.
1171 	 */
1172 	switch (mechanism->cm_type) {
1173 	case SHA256_HMAC_MECH_INFO_TYPE:
1174 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1175 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1176 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1177 		break;
1178 	case SHA384_HMAC_MECH_INFO_TYPE:
1179 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1180 	case SHA512_HMAC_MECH_INFO_TYPE:
1181 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1182 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1183 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1184 		break;
1185 	default:
1186 		return (CRYPTO_MECHANISM_INVALID);
1187 	}
1188 
1189 	/* Add support for key by attributes (RFE 4706552) */
1190 	if (key->ck_format != CRYPTO_KEY_RAW)
1191 		return (CRYPTO_ARGUMENTS_BAD);
1192 
1193 	if (ctx_template != NULL) {
1194 		/* reuse context template */
1195 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1196 	} else {
1197 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1198 		/* no context template, initialize context */
1199 		if (keylen_in_bytes > sha_hmac_block_size) {
1200 			/*
1201 			 * Hash the passed-in key to get a smaller key.
1202 			 * The inner context is used since it hasn't been
1203 			 * initialized yet.
1204 			 */
1205 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1206 			    &sha2_hmac_ctx.hc_icontext,
1207 			    key->ck_data, keylen_in_bytes, digest);
1208 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1209 			    sha_digest_len);
1210 		} else {
1211 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1212 			    keylen_in_bytes);
1213 		}
1214 	}
1215 
1216 	/* get the mechanism parameters, if applicable */
1217 	if ((mechanism->cm_type % 3) == 2) {
1218 		if (mechanism->cm_param == NULL ||
1219 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1220 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1221 			goto bail;
1222 		}
1223 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1224 		if (digest_len > sha_digest_len) {
1225 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1226 			goto bail;
1227 		}
1228 	}
1229 
1230 	/* do a SHA2 update of the inner context using the specified data */
1231 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1232 	if (ret != CRYPTO_SUCCESS)
1233 		/* the update failed, free context and bail */
1234 		goto bail;
1235 
1236 	/*
1237 	 * Do a SHA2 final on the inner context.
1238 	 */
1239 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1240 
1241 	/*
1242 	 * Do a SHA2 update on the outer context, feeding the inner
1243 	 * digest as data.
1244 	 *
1245 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1246 	 * bytes of the inner hash value.
1247 	 */
1248 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1249 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1250 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1251 		    SHA384_DIGEST_LENGTH);
1252 	else
1253 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1254 
1255 	/*
1256 	 * Do a SHA2 final on the outer context, storing the computed
1257 	 * digest in the user's buffer.
1258 	 */
1259 	switch (mac->cd_format) {
1260 	case CRYPTO_DATA_RAW:
1261 		if (digest_len != sha_digest_len) {
1262 			/*
1263 			 * The caller requested a short digest. Digest
1264 			 * into a scratch buffer and return to
1265 			 * the user only what was requested.
1266 			 */
1267 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1268 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1269 			    mac->cd_offset, digest_len);
1270 		} else {
1271 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1272 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1273 		}
1274 		break;
1275 	case CRYPTO_DATA_UIO:
1276 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1277 		    digest_len, digest);
1278 		break;
1279 	case CRYPTO_DATA_MBLK:
1280 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1281 		    digest_len, digest);
1282 		break;
1283 	default:
1284 		ret = CRYPTO_ARGUMENTS_BAD;
1285 	}
1286 
1287 	if (ret == CRYPTO_SUCCESS) {
1288 		mac->cd_length = digest_len;
1289 		return (CRYPTO_SUCCESS);
1290 	}
1291 bail:
1292 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1293 	mac->cd_length = 0;
1294 	return (ret);
1295 }
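
/*
 * Example (sketch): the raw key these MAC entry points expect.  ck_length is
 * expressed in bits, which is why the code above applies CRYPTO_BITS2BYTES()
 * to it; "keybuf" is a placeholder for the actual key material.
 *
 *	uint8_t keybuf[32];
 *	crypto_key_t key;
 *
 *	key.ck_format = CRYPTO_KEY_RAW;
 *	key.ck_data = keybuf;
 *	key.ck_length = sizeof (keybuf) * 8;
 */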
1296 
1297 /* ARGSUSED */
1298 static int
1299 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1300     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1301     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1302     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1303 {
1304 	int ret = CRYPTO_SUCCESS;
1305 	uchar_t digest[SHA512_DIGEST_LENGTH];
1306 	sha2_hmac_ctx_t sha2_hmac_ctx;
1307 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1308 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1309 
1310 	/*
1311 	 * Set the digest length and block size to values appropriate to the
1312 	 * mechanism.
1313 	 */
1314 	switch (mechanism->cm_type) {
1315 	case SHA256_HMAC_MECH_INFO_TYPE:
1316 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1317 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1318 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1319 		break;
1320 	case SHA384_HMAC_MECH_INFO_TYPE:
1321 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1322 	case SHA512_HMAC_MECH_INFO_TYPE:
1323 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1324 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1325 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1326 		break;
1327 	default:
1328 		return (CRYPTO_MECHANISM_INVALID);
1329 	}
1330 
1331 	/* Add support for key by attributes (RFE 4706552) */
1332 	if (key->ck_format != CRYPTO_KEY_RAW)
1333 		return (CRYPTO_ARGUMENTS_BAD);
1334 
1335 	if (ctx_template != NULL) {
1336 		/* reuse context template */
1337 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1338 	} else {
1339 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1340 		/* no context template, initialize context */
1341 		if (keylen_in_bytes > sha_hmac_block_size) {
1342 			/*
1343 			 * Hash the passed-in key to get a smaller key.
1344 			 * The inner context is used since it hasn't been
1345 			 * initialized yet.
1346 			 */
1347 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1348 			    &sha2_hmac_ctx.hc_icontext,
1349 			    key->ck_data, keylen_in_bytes, digest);
1350 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1351 			    sha_digest_len);
1352 		} else {
1353 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1354 			    keylen_in_bytes);
1355 		}
1356 	}
1357 
1358 	/* get the mechanism parameters, if applicable */
1359 	if (mechanism->cm_type % 3 == 2) {
1360 		if (mechanism->cm_param == NULL ||
1361 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1362 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1363 			goto bail;
1364 		}
1365 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1366 		if (digest_len > sha_digest_len) {
1367 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1368 			goto bail;
1369 		}
1370 	}
1371 
1372 	if (mac->cd_length != digest_len) {
1373 		ret = CRYPTO_INVALID_MAC;
1374 		goto bail;
1375 	}
1376 
1377 	/* do a SHA2 update of the inner context using the specified data */
1378 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1379 	if (ret != CRYPTO_SUCCESS)
1380 		/* the update failed, free context and bail */
1381 		goto bail;
1382 
1383 	/* do a SHA2 final on the inner context */
1384 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1385 
1386 	/*
1387 	 * Do a SHA2 update on the outer context, feeding the inner
1388 	 * digest as data.
1389 	 *
1390 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1391 	 * bytes of the inner hash value.
1392 	 */
1393 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1394 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1395 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1396 		    SHA384_DIGEST_LENGTH);
1397 	else
1398 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1399 
1400 	/*
1401 	 * Do a SHA2 final on the outer context, storing the computed
1402 	 * digest in the user's buffer.
1403 	 */
1404 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1405 
1406 	/*
1407 	 * Compare the computed digest against the expected digest passed
1408 	 * as an argument.
1409 	 */
1410 
1411 	switch (mac->cd_format) {
1412 
1413 	case CRYPTO_DATA_RAW:
1414 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1415 		    mac->cd_offset, digest_len) != 0)
1416 			ret = CRYPTO_INVALID_MAC;
1417 		break;
1418 
1419 	case CRYPTO_DATA_UIO: {
1420 		off_t offset = mac->cd_offset;
1421 		uint_t vec_idx;
1422 		off_t scratch_offset = 0;
1423 		size_t length = digest_len;
1424 		size_t cur_len;
1425 
1426 		/* we support only kernel buffers */
1427 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1428 			return (CRYPTO_ARGUMENTS_BAD);
1429 
1430 		/* jump to the first iovec containing the expected digest */
1431 		for (vec_idx = 0;
1432 		    vec_idx < mac->cd_uio->uio_iovcnt &&
1433 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1434 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1435 			;
1436 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1437 			/*
1438 			 * The caller specified an offset that is
1439 			 * larger than the total size of the buffers
1440 			 * it provided.
1441 			 */
1442 			ret = CRYPTO_DATA_LEN_RANGE;
1443 			break;
1444 		}
1445 
1446 		/* do the comparison of computed digest vs specified one */
1447 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1448 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1449 			    offset, length);
1450 
1451 			if (bcmp(digest + scratch_offset,
1452 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1453 			    cur_len) != 0) {
1454 				ret = CRYPTO_INVALID_MAC;
1455 				break;
1456 			}
1457 
1458 			length -= cur_len;
1459 			vec_idx++;
1460 			scratch_offset += cur_len;
1461 			offset = 0;
1462 		}
1463 		break;
1464 	}
1465 
1466 	case CRYPTO_DATA_MBLK: {
1467 		off_t offset = mac->cd_offset;
1468 		mblk_t *mp;
1469 		off_t scratch_offset = 0;
1470 		size_t length = digest_len;
1471 		size_t cur_len;
1472 
1473 		/* jump to the first mblk_t containing the expected digest */
1474 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1475 		    offset -= MBLKL(mp), mp = mp->b_cont)
1476 			;
1477 		if (mp == NULL) {
1478 			/*
1479 			 * The caller specified an offset that is larger than
1480 			 * the total size of the buffers it provided.
1481 			 */
1482 			ret = CRYPTO_DATA_LEN_RANGE;
1483 			break;
1484 		}
1485 
1486 		while (mp != NULL && length > 0) {
1487 			cur_len = MIN(MBLKL(mp) - offset, length);
1488 			if (bcmp(digest + scratch_offset,
1489 			    mp->b_rptr + offset, cur_len) != 0) {
1490 				ret = CRYPTO_INVALID_MAC;
1491 				break;
1492 			}
1493 
1494 			length -= cur_len;
1495 			mp = mp->b_cont;
1496 			scratch_offset += cur_len;
1497 			offset = 0;
1498 		}
1499 		break;
1500 	}
1501 
1502 	default:
1503 		ret = CRYPTO_ARGUMENTS_BAD;
1504 	}
1505 
1506 	return (ret);
1507 bail:
1508 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1509 	mac->cd_length = 0;
1510 	return (ret);
1511 }
1512 
1513 /*
1514  * KCF software provider context management entry points.
1515  */
1516 
1517 /* ARGSUSED */
1518 static int
1519 sha2_create_ctx_template(crypto_provider_handle_t provider,
1520     crypto_mechanism_t *mechanism, crypto_key_t *key,
1521     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1522     crypto_req_handle_t req)
1523 {
1524 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1525 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1526 	uint32_t sha_digest_len, sha_hmac_block_size;
1527 
1528 	/*
1529 	 * Set the digest length and block size to values appropriate to the
1530 	 * mechanism.
1531 	 */
1532 	switch (mechanism->cm_type) {
1533 	case SHA256_HMAC_MECH_INFO_TYPE:
1534 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1535 		sha_digest_len = SHA256_DIGEST_LENGTH;
1536 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1537 		break;
1538 	case SHA384_HMAC_MECH_INFO_TYPE:
1539 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1540 	case SHA512_HMAC_MECH_INFO_TYPE:
1541 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1542 		sha_digest_len = SHA512_DIGEST_LENGTH;
1543 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1544 		break;
1545 	default:
1546 		return (CRYPTO_MECHANISM_INVALID);
1547 	}
1548 
1549 	/* Add support for key by attributes (RFE 4706552) */
1550 	if (key->ck_format != CRYPTO_KEY_RAW)
1551 		return (CRYPTO_ARGUMENTS_BAD);
1552 
1553 	/*
1554 	 * Allocate and initialize SHA2 context.
1555 	 */
1556 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1557 	    crypto_kmflag(req));
1558 	if (sha2_hmac_ctx_tmpl == NULL)
1559 		return (CRYPTO_HOST_MEMORY);
1560 
1561 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1562 
1563 	if (keylen_in_bytes > sha_hmac_block_size) {
1564 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1565 
1566 		/*
1567 		 * Hash the passed-in key to get a smaller key.
1568 		 * The inner context is used since it hasn't been
1569 		 * initialized yet.
1570 		 */
1571 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1572 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1573 		    key->ck_data, keylen_in_bytes, digested_key);
1574 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1575 		    sha_digest_len);
1576 	} else {
1577 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1578 		    keylen_in_bytes);
1579 	}
1580 
1581 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1582 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1583 
1584 	return (CRYPTO_SUCCESS);
1585 }
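
/*
 * Example (sketch): KCF typically creates the template once per key and then
 * hands it back on later operations, which simply bcopy() it (see
 * sha2_mac_init() and sha2_mac_atomic() above); "provider", "sid", "mech",
 * "key", "data", "mac" and "req" are placeholders.
 *
 *	crypto_spi_ctx_template_t tmpl;
 *	size_t tmpl_size;
 *
 *	(void) sha2_create_ctx_template(provider, &mech, &key, &tmpl,
 *	    &tmpl_size, req);
 *	(void) sha2_mac_atomic(provider, sid, &mech, &key, &data, &mac,
 *	    tmpl, req);
 */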
1586 
1587 static int
1588 sha2_free_context(crypto_ctx_t *ctx)
1589 {
1590 	uint_t ctx_len;
1591 
1592 	if (ctx->cc_provider_private == NULL)
1593 		return (CRYPTO_SUCCESS);
1594 
1595 	/*
1596 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1597 	 * have different lengths.
1598 	 *
1599 	 * Note: the check below depends on the sha2_mech_type_t ordering,
	 * in which the plain digest mechanisms (SHA256, SHA384, SHA512) are
	 * exactly those whose mechanism type is a multiple of three.
1600 	 */
1601 
1602 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1603 		ctx_len = sizeof (sha2_ctx_t);
1604 	else
1605 		ctx_len = sizeof (sha2_hmac_ctx_t);
1606 
1607 	bzero(ctx->cc_provider_private, ctx_len);
1608 	kmem_free(ctx->cc_provider_private, ctx_len);
1609 	ctx->cc_provider_private = NULL;
1610 
1611 	return (CRYPTO_SUCCESS);
1612 }
1613 
1614 /*
1615  * SHA-2 Power-Up Self-Test
1616  */
1617 void
1618 sha2_POST(int *rc)
1619 {
1620 
1621 	*rc = fips_sha2_post();
1622 
1623 }
1624