xref: /illumos-gate/usr/src/uts/common/crypto/io/sha2_mod.c (revision 3c573fcc51430b02603f62713f3f5d1b0b1aed1c)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 
27 #include <sys/modctl.h>
28 #include <sys/cmn_err.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/strsun.h>
32 #include <sys/systm.h>
33 #include <sys/sysmacros.h>
34 #define	_SHA2_IMPL
35 #include <sys/sha2.h>
36 #include <sha2/sha2_impl.h>
37 
38 /*
39  * The sha2 module is created with two modlinkages:
40  * - a modlmisc that allows consumers to directly call the entry points
41  *   SHA2Init, SHA2Update, and SHA2Final.
42  * - a modlcrypto that allows the module to register with the Kernel
43  *   Cryptographic Framework (KCF) as a software provider for the SHA2
44  *   mechanisms.
45  */
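
/*
 * As a rough sketch (not code from this module), a direct misc/sha2
 * consumer uses the exported entry points along these lines, where
 * buf/len stand for the caller's data:
 *
 *	SHA2_CTX ctx;
 *	uint8_t md[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256, &ctx);
 *	SHA2Update(&ctx, buf, len);
 *	SHA2Final(md, &ctx);
 *
 * KCF consumers instead go through crypto_digest(9F)/crypto_mac(9F) and
 * reach this module via the SPI entry points registered below.
 */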
46 
47 static struct modlmisc modlmisc = {
48 	&mod_miscops,
49 	"SHA2 Message-Digest Algorithm"
50 };
51 
52 static struct modlcrypto modlcrypto = {
53 	&mod_cryptoops,
54 	"SHA2 Kernel SW Provider"
55 };
56 
57 static struct modlinkage modlinkage = {
58 	MODREV_1, &modlmisc, &modlcrypto, NULL
59 };
60 
61 /*
62  * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
63  * by KCF to one of the entry points.
64  */
65 
66 #define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
67 #define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
68 
69 /* to extract the digest length passed as mechanism parameter */
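/*
 * The length is passed as a ulong_t mechanism parameter that may not be
 * suitably aligned in the caller's buffer, hence the IS_P2ALIGNED()
 * check and the bcopy() into an aligned temporary on the unaligned path.
 */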
70 #define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
71 	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
72 		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);	\
73 	else {								\
74 		ulong_t tmp_ulong;					\
75 		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
76 		(len) = (uint32_t)tmp_ulong;				\
77 	}								\
78 }
79 
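/*
 * Digest a raw key with the plain SHA2 mechanism "mech".  The HMAC code
 * below uses this to shrink keys longer than the hash block size, as the
 * HMAC construction (RFC 2104) requires.
 */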
80 #define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
81 	SHA2Init(mech, ctx);				\
82 	SHA2Update(ctx, key, len);			\
83 	SHA2Final(digest, ctx);				\
84 }
85 
86 /*
87  * Mechanism info structure passed to KCF during registration.
88  */
89 static crypto_mech_info_t sha2_mech_info_tab[] = {
90 	/* SHA256 */
91 	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
92 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
93 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
94 	/* SHA256-HMAC */
95 	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
96 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
97 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
98 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 	/* SHA256-HMAC GENERAL */
100 	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
101 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
102 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
103 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
104 	/* SHA384 */
105 	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
106 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
107 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
108 	/* SHA384-HMAC */
109 	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
110 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
111 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
112 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
113 	/* SHA384-HMAC GENERAL */
114 	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
115 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
116 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
117 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
118 	/* SHA512 */
119 	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
120 	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
121 	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 	/* SHA512-HMAC */
123 	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
124 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
127 	/* SHA512-HMAC GENERAL */
128 	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
129 	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
130 	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
131 	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
132 };
133 
134 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
135 
136 static crypto_control_ops_t sha2_control_ops = {
137 	sha2_provider_status
138 };
139 
140 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
141     crypto_req_handle_t);
142 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
143     crypto_req_handle_t);
144 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
145     crypto_req_handle_t);
146 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
147     crypto_req_handle_t);
148 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
149     crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
150     crypto_req_handle_t);
151 
152 static crypto_digest_ops_t sha2_digest_ops = {
153 	sha2_digest_init,
154 	sha2_digest,
155 	sha2_digest_update,
156 	NULL,
157 	sha2_digest_final,
158 	sha2_digest_atomic
159 };
160 
161 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
162     crypto_spi_ctx_template_t, crypto_req_handle_t);
163 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
164     crypto_req_handle_t);
165 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
166 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
167     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
168     crypto_spi_ctx_template_t, crypto_req_handle_t);
169 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
170     crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
171     crypto_spi_ctx_template_t, crypto_req_handle_t);
172 
173 static crypto_mac_ops_t sha2_mac_ops = {
174 	sha2_mac_init,
175 	NULL,
176 	sha2_mac_update,
177 	sha2_mac_final,
178 	sha2_mac_atomic,
179 	sha2_mac_verify_atomic
180 };
181 
182 static int sha2_create_ctx_template(crypto_provider_handle_t,
183     crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
184     size_t *, crypto_req_handle_t);
185 static int sha2_free_context(crypto_ctx_t *);
186 
187 static crypto_ctx_ops_t sha2_ctx_ops = {
188 	sha2_create_ctx_template,
189 	sha2_free_context
190 };
191 
192 static void sha2_POST(int *);
193 
194 static crypto_fips140_ops_t sha2_fips140_ops = {
195 	sha2_POST
196 };
197 
198 static crypto_ops_t sha2_crypto_ops = {
199 	&sha2_control_ops,
200 	&sha2_digest_ops,
201 	NULL,
202 	&sha2_mac_ops,
203 	NULL,
204 	NULL,
205 	NULL,
206 	NULL,
207 	NULL,
208 	NULL,
209 	NULL,
210 	NULL,
211 	NULL,
212 	&sha2_ctx_ops,
213 	NULL,
214 	NULL,
215 	&sha2_fips140_ops
216 };
217 
218 static crypto_provider_info_t sha2_prov_info = {
219 	CRYPTO_SPI_VERSION_4,
220 	"SHA2 Software Provider",
221 	CRYPTO_SW_PROVIDER,
222 	{&modlinkage},
223 	NULL,
224 	&sha2_crypto_ops,
225 	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
226 	sha2_mech_info_tab
227 };
228 
229 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
230 
231 int
232 _init()
233 {
234 	int ret;
235 
236 	if ((ret = mod_install(&modlinkage)) != 0)
237 		return (ret);
238 
239 	/*
240 	 * Register with KCF. If the registration fails, log an
241 	 * error but do not uninstall the module, since the functionality
242 	 * provided by misc/sha2 should still be available.
243 	 */
244 	if ((ret = crypto_register_provider(&sha2_prov_info,
245 	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
246 		cmn_err(CE_WARN, "sha2 _init: "
247 		    "crypto_register_provider() failed (0x%x)", ret);
248 
249 	return (0);
250 }
251 
252 int
253 _info(struct modinfo *modinfop)
254 {
255 	return (mod_info(&modlinkage, modinfop));
256 }
257 
258 /*
259  * KCF software provider control entry points.
260  */
261 /* ARGSUSED */
262 static void
263 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
264 {
265 	*status = CRYPTO_PROVIDER_READY;
266 }
267 
268 /*
269  * KCF software provider digest entry points.
270  */
271 
272 static int
273 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
274     crypto_req_handle_t req)
275 {
276 
277 	/*
278 	 * Allocate and initialize SHA2 context.
279 	 */
280 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
281 	    crypto_kmflag(req));
282 	if (ctx->cc_provider_private == NULL)
283 		return (CRYPTO_HOST_MEMORY);
284 
285 	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
286 	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
287 
288 	return (CRYPTO_SUCCESS);
289 }
290 
291 /*
292  * Helper SHA2 digest update function for uio data.
293  */
294 static int
295 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
296 {
297 	off_t offset = data->cd_offset;
298 	size_t length = data->cd_length;
299 	uint_t vec_idx;
300 	size_t cur_len;
301 
302 	/* we support only kernel buffer */
303 	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
304 		return (CRYPTO_ARGUMENTS_BAD);
305 
306 	/*
307 	 * Jump to the first iovec containing data to be
308 	 * digested.
309 	 */
310 	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
311 	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
312 	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
313 		;
314 	if (vec_idx == data->cd_uio->uio_iovcnt) {
315 		/*
316 		 * The caller specified an offset that is larger than the
317 		 * total size of the buffers it provided.
318 		 */
319 		return (CRYPTO_DATA_LEN_RANGE);
320 	}
321 
322 	/*
323 	 * Now do the digesting on the iovecs.
324 	 */
325 	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
326 		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
327 		    offset, length);
328 
329 		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
330 		    uio_iov[vec_idx].iov_base + offset, cur_len);
331 		length -= cur_len;
332 		vec_idx++;
333 		offset = 0;
334 	}
335 
336 	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
337 		/*
338 		 * The end of the specified iovecs was reached but the
339 		 * requested length could not be processed; the caller
340 		 * requested to digest more data than it provided.
341 		 */
342 		return (CRYPTO_DATA_LEN_RANGE);
343 	}
344 
345 	return (CRYPTO_SUCCESS);
346 }
347 
348 /*
349  * Helper SHA2 digest final function for uio data.
350  * digest_len is the length of the desired digest. If digest_len
351  * is smaller than the default SHA2 digest length, the caller
352  * must pass a scratch buffer, digest_scratch, which must be at
353  * least as large as the algorithm's full digest length.
354  */
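/*
 * For example (sketch only): a SUN_CKM_SHA256_HMAC_GENERAL request for a
 * 16-byte MAC still has SHA2Final() produce the full 32-byte SHA-256
 * digest, so the digest is computed into digest_scratch and only the
 * first 16 bytes are copied into the caller's iovec.
 */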
355 static int
356 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
357     ulong_t digest_len, uchar_t *digest_scratch)
358 {
359 	off_t offset = digest->cd_offset;
360 	uint_t vec_idx;
361 
362 	/* we support only kernel buffer */
363 	if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
364 		return (CRYPTO_ARGUMENTS_BAD);
365 
366 	/*
367 	 * Jump to the first iovec containing ptr to the digest to
368 	 * be returned.
369 	 */
370 	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
371 	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
372 	    offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
373 		;
374 	if (vec_idx == digest->cd_uio->uio_iovcnt) {
375 		/*
376 		 * The caller specified an offset that is
377 		 * larger than the total size of the buffers
378 		 * it provided.
379 		 */
380 		return (CRYPTO_DATA_LEN_RANGE);
381 	}
382 
383 	if (offset + digest_len <=
384 	    digest->cd_uio->uio_iov[vec_idx].iov_len) {
385 		/*
386 		 * The computed SHA2 digest will fit in the current
387 		 * iovec.
388 		 */
389 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
390 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
391 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
392 		    (digest_len != SHA512_DIGEST_LENGTH))) {
393 			/*
394 			 * The caller requested a short digest. Digest
395 			 * into a scratch buffer and return to
396 			 * the user only what was requested.
397 			 */
398 			SHA2Final(digest_scratch, sha2_ctx);
399 
400 			bcopy(digest_scratch, (uchar_t *)digest->
401 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
402 			    digest_len);
403 		} else {
404 			SHA2Final((uchar_t *)digest->
405 			    cd_uio->uio_iov[vec_idx].iov_base + offset,
406 			    sha2_ctx);
407 
408 		}
409 	} else {
410 		/*
411 		 * The computed digest will cross one or more iovecs.
412 		 * This is bad performance-wise but we need to support it.
413 		 * Allocate a small scratch buffer on the stack and
414 		 * copy it piecemeal to the specified digest iovecs.
415 		 */
416 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
417 		off_t scratch_offset = 0;
418 		size_t length = digest_len;
419 		size_t cur_len;
420 
421 		SHA2Final(digest_tmp, sha2_ctx);
422 
423 		while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
424 			cur_len =
425 			    MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
426 			    offset, length);
427 			bcopy(digest_tmp + scratch_offset,
428 			    digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
429 			    cur_len);
430 
431 			length -= cur_len;
432 			vec_idx++;
433 			scratch_offset += cur_len;
434 			offset = 0;
435 		}
436 
437 		if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
438 			/*
439 			 * The end of the specified iovecs was reached but
440 			 * the requested length could not be processed; the
441 			 * caller requested to digest more data than it
442 			 * provided.
443 			 */
444 			return (CRYPTO_DATA_LEN_RANGE);
445 		}
446 	}
447 
448 	return (CRYPTO_SUCCESS);
449 }
450 
451 /*
452  * Helper SHA2 digest update for mblk's.
453  */
454 static int
455 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
456 {
457 	off_t offset = data->cd_offset;
458 	size_t length = data->cd_length;
459 	mblk_t *mp;
460 	size_t cur_len;
461 
462 	/*
463 	 * Jump to the first mblk_t containing data to be digested.
464 	 */
465 	for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
466 	    offset -= MBLKL(mp), mp = mp->b_cont)
467 		;
468 	if (mp == NULL) {
469 		/*
470 		 * The caller specified an offset that is larger than the
471 		 * total size of the buffers it provided.
472 		 */
473 		return (CRYPTO_DATA_LEN_RANGE);
474 	}
475 
476 	/*
477 	 * Now do the digesting on the mblk chain.
478 	 */
479 	while (mp != NULL && length > 0) {
480 		cur_len = MIN(MBLKL(mp) - offset, length);
481 		SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
482 		length -= cur_len;
483 		offset = 0;
484 		mp = mp->b_cont;
485 	}
486 
487 	if (mp == NULL && length > 0) {
488 		/*
489 		 * The end of the mblk chain was reached but the requested
490 		 * length could not be processed; the caller requested to
491 		 * digest more data than it provided.
492 		 */
493 		return (CRYPTO_DATA_LEN_RANGE);
494 	}
495 
496 	return (CRYPTO_SUCCESS);
497 }
498 
499 /*
500  * Helper SHA2 digest final for mblk's.
501  * digest_len is the length of the desired digest. If digest_len
502  * is smaller than the default SHA2 digest length, the caller
503  * must pass a scratch buffer, digest_scratch, which must be at
504  * least as large as the algorithm's full digest length.
505  */
506 static int
507 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
508     ulong_t digest_len, uchar_t *digest_scratch)
509 {
510 	off_t offset = digest->cd_offset;
511 	mblk_t *mp;
512 
513 	/*
514 	 * Jump to the first mblk_t that will be used to store the digest.
515 	 */
516 	for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
517 	    offset -= MBLKL(mp), mp = mp->b_cont)
518 		;
519 	if (mp == NULL) {
520 		/*
521 		 * The caller specified an offset that is larger than the
522 		 * total size of the buffers it provided.
523 		 */
524 		return (CRYPTO_DATA_LEN_RANGE);
525 	}
526 
527 	if (offset + digest_len <= MBLKL(mp)) {
528 		/*
529 		 * The computed SHA2 digest will fit in the current mblk.
530 		 * Do the SHA2Final() in-place.
531 		 */
532 		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
533 		    (digest_len != SHA256_DIGEST_LENGTH)) ||
534 		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
535 		    (digest_len != SHA512_DIGEST_LENGTH))) {
536 			/*
537 			 * The caller requested a short digest. Digest
538 			 * into a scratch buffer and return to
539 			 * the user only what was requested.
540 			 */
541 			SHA2Final(digest_scratch, sha2_ctx);
542 			bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
543 		} else {
544 			SHA2Final(mp->b_rptr + offset, sha2_ctx);
545 		}
546 	} else {
547 		/*
548 		 * The computed digest will cross one or more mblks.
549 		 * This is bad performance-wise but we need to support it.
550 		 * Allocate a small scratch buffer on the stack and
551 		 * copy it piecemeal to the specified digest mblks.
552 		 */
553 		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
554 		off_t scratch_offset = 0;
555 		size_t length = digest_len;
556 		size_t cur_len;
557 
558 		SHA2Final(digest_tmp, sha2_ctx);
559 
560 		while (mp != NULL && length > 0) {
561 			cur_len = MIN(MBLKL(mp) - offset, length);
562 			bcopy(digest_tmp + scratch_offset,
563 			    mp->b_rptr + offset, cur_len);
564 
565 			length -= cur_len;
566 			mp = mp->b_cont;
567 			scratch_offset += cur_len;
568 			offset = 0;
569 		}
570 
571 		if (mp == NULL && length > 0) {
572 			/*
573 			 * The end of the mblk chain was reached but the
574 			 * requested length could not be processed; the
575 			 * caller requested to digest more data than it
576 			 * provided.
577 			 */
578 			return (CRYPTO_DATA_LEN_RANGE);
579 		}
580 	}
581 
582 	return (CRYPTO_SUCCESS);
583 }
584 
585 /* ARGSUSED */
586 static int
587 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
588     crypto_req_handle_t req)
589 {
590 	int ret = CRYPTO_SUCCESS;
591 	uint_t sha_digest_len;
592 
593 	ASSERT(ctx->cc_provider_private != NULL);
594 
595 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
596 	case SHA256_MECH_INFO_TYPE:
597 		sha_digest_len = SHA256_DIGEST_LENGTH;
598 		break;
599 	case SHA384_MECH_INFO_TYPE:
600 		sha_digest_len = SHA384_DIGEST_LENGTH;
601 		break;
602 	case SHA512_MECH_INFO_TYPE:
603 		sha_digest_len = SHA512_DIGEST_LENGTH;
604 		break;
605 	default:
606 		return (CRYPTO_MECHANISM_INVALID);
607 	}
608 
609 	/*
610 	 * If the output buffer is missing or too small, just return the
611 	 * length needed to store the digest and do not destroy the context.
612 	 */
613 	if ((digest->cd_length == 0) ||
614 	    (digest->cd_length < sha_digest_len)) {
615 		digest->cd_length = sha_digest_len;
616 		return (CRYPTO_BUFFER_TOO_SMALL);
617 	}
618 
619 	/*
620 	 * Do the SHA2 update on the specified input data.
621 	 */
622 	switch (data->cd_format) {
623 	case CRYPTO_DATA_RAW:
624 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
625 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
626 		    data->cd_length);
627 		break;
628 	case CRYPTO_DATA_UIO:
629 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
630 		    data);
631 		break;
632 	case CRYPTO_DATA_MBLK:
633 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
634 		    data);
635 		break;
636 	default:
637 		ret = CRYPTO_ARGUMENTS_BAD;
638 	}
639 
640 	if (ret != CRYPTO_SUCCESS) {
641 		/* the update failed, free context and bail */
642 		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
643 		ctx->cc_provider_private = NULL;
644 		digest->cd_length = 0;
645 		return (ret);
646 	}
647 
648 	/*
649 	 * Do the SHA2 final.  This must be done separately since the
650 	 * digest data type can differ from the input data type.
651 	 */
652 	switch (digest->cd_format) {
653 	case CRYPTO_DATA_RAW:
654 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
655 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
656 		break;
657 	case CRYPTO_DATA_UIO:
658 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
659 		    digest, sha_digest_len, NULL);
660 		break;
661 	case CRYPTO_DATA_MBLK:
662 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
663 		    digest, sha_digest_len, NULL);
664 		break;
665 	default:
666 		ret = CRYPTO_ARGUMENTS_BAD;
667 	}
668 
669 	/* all done, free context and return */
670 
671 	if (ret == CRYPTO_SUCCESS)
672 		digest->cd_length = sha_digest_len;
673 	else
674 		digest->cd_length = 0;
675 
676 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
677 	ctx->cc_provider_private = NULL;
678 	return (ret);
679 }
680 
681 /* ARGSUSED */
682 static int
683 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
684     crypto_req_handle_t req)
685 {
686 	int ret = CRYPTO_SUCCESS;
687 
688 	ASSERT(ctx->cc_provider_private != NULL);
689 
690 	/*
691 	 * Do the SHA2 update on the specified input data.
692 	 */
693 	switch (data->cd_format) {
694 	case CRYPTO_DATA_RAW:
695 		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
696 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
697 		    data->cd_length);
698 		break;
699 	case CRYPTO_DATA_UIO:
700 		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
701 		    data);
702 		break;
703 	case CRYPTO_DATA_MBLK:
704 		ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
705 		    data);
706 		break;
707 	default:
708 		ret = CRYPTO_ARGUMENTS_BAD;
709 	}
710 
711 	return (ret);
712 }
713 
714 /* ARGSUSED */
715 static int
716 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
717     crypto_req_handle_t req)
718 {
719 	int ret = CRYPTO_SUCCESS;
720 	uint_t sha_digest_len;
721 
722 	ASSERT(ctx->cc_provider_private != NULL);
723 
724 	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
725 	case SHA256_MECH_INFO_TYPE:
726 		sha_digest_len = SHA256_DIGEST_LENGTH;
727 		break;
728 	case SHA384_MECH_INFO_TYPE:
729 		sha_digest_len = SHA384_DIGEST_LENGTH;
730 		break;
731 	case SHA512_MECH_INFO_TYPE:
732 		sha_digest_len = SHA512_DIGEST_LENGTH;
733 		break;
734 	default:
735 		return (CRYPTO_MECHANISM_INVALID);
736 	}
737 
738 	/*
739 	 * If the output buffer is missing or too small, just return the
740 	 * length needed to store the digest and do not destroy the context.
741 	 */
742 	if ((digest->cd_length == 0) ||
743 	    (digest->cd_length < sha_digest_len)) {
744 		digest->cd_length = sha_digest_len;
745 		return (CRYPTO_BUFFER_TOO_SMALL);
746 	}
747 
748 	/*
749 	 * Do a SHA2 final.
750 	 */
751 	switch (digest->cd_format) {
752 	case CRYPTO_DATA_RAW:
753 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
754 		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
755 		break;
756 	case CRYPTO_DATA_UIO:
757 		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
758 		    digest, sha_digest_len, NULL);
759 		break;
760 	case CRYPTO_DATA_MBLK:
761 		ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
762 		    digest, sha_digest_len, NULL);
763 		break;
764 	default:
765 		ret = CRYPTO_ARGUMENTS_BAD;
766 	}
767 
768 	/* all done, free context and return */
769 
770 	if (ret == CRYPTO_SUCCESS)
771 		digest->cd_length = sha_digest_len;
772 	else
773 		digest->cd_length = 0;
774 
775 	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
776 	ctx->cc_provider_private = NULL;
777 
778 	return (ret);
779 }
780 
781 /* ARGSUSED */
782 static int
783 sha2_digest_atomic(crypto_provider_handle_t provider,
784     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
785     crypto_data_t *data, crypto_data_t *digest,
786     crypto_req_handle_t req)
787 {
788 	int ret = CRYPTO_SUCCESS;
789 	SHA2_CTX sha2_ctx;
790 	uint32_t sha_digest_len;
791 
792 	/*
793 	 * Do the SHA2 init, then the SHA2 update on the specified data.
794 	 */
795 
796 	SHA2Init(mechanism->cm_type, &sha2_ctx);
797 
798 	switch (data->cd_format) {
799 	case CRYPTO_DATA_RAW:
800 		SHA2Update(&sha2_ctx, (uint8_t *)data->
801 		    cd_raw.iov_base + data->cd_offset, data->cd_length);
802 		break;
803 	case CRYPTO_DATA_UIO:
804 		ret = sha2_digest_update_uio(&sha2_ctx, data);
805 		break;
806 	case CRYPTO_DATA_MBLK:
807 		ret = sha2_digest_update_mblk(&sha2_ctx, data);
808 		break;
809 	default:
810 		ret = CRYPTO_ARGUMENTS_BAD;
811 	}
816 
817 	if (ret != CRYPTO_SUCCESS) {
818 		/* the update failed, bail */
819 		digest->cd_length = 0;
820 		return (ret);
821 	}
822 
823 	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
824 		sha_digest_len = SHA256_DIGEST_LENGTH;
825 	else
826 		sha_digest_len = SHA512_DIGEST_LENGTH;
827 
828 	/*
829 	 * Do the SHA2 final.  This must be done separately since the
830 	 * digest data type can differ from the input data type.
831 	 */
832 	switch (digest->cd_format) {
833 	case CRYPTO_DATA_RAW:
834 		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
835 		    digest->cd_offset, &sha2_ctx);
836 		break;
837 	case CRYPTO_DATA_UIO:
838 		ret = sha2_digest_final_uio(&sha2_ctx, digest,
839 		    sha_digest_len, NULL);
840 		break;
841 	case CRYPTO_DATA_MBLK:
842 		ret = sha2_digest_final_mblk(&sha2_ctx, digest,
843 		    sha_digest_len, NULL);
844 		break;
845 	default:
846 		ret = CRYPTO_ARGUMENTS_BAD;
847 	}
848 
849 	if (ret == CRYPTO_SUCCESS)
850 		digest->cd_length = sha_digest_len;
851 	else
852 		digest->cd_length = 0;
853 
854 	return (ret);
855 }
856 
857 /*
858  * KCF software provider mac entry points.
859  *
860  * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
861  *
862  * Init:
863  * The initialization routine initializes what we denote
864  * as the inner and outer contexts by doing
865  * - for inner context: SHA2(key XOR ipad)
866  * - for outer context: SHA2(key XOR opad)
867  *
868  * Update:
869  * Each subsequent SHA2 HMAC update will result in an
870  * update of the inner context with the specified data.
871  *
872  * Final:
873  * The SHA2 HMAC final will do a SHA2 final operation on the
874  * inner context, and the resulting digest will be used
875  * as the data for an update on the outer context. Last
876  * but not least, a SHA2 final on the outer context will
877  * be performed to obtain the SHA2 HMAC digest to return
878  * to the user.
879  */
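
/*
 * Schematically, with B the hash block size and K the key (hashed first
 * if longer than B, then zero-padded to B), ipad being B bytes of 0x36
 * and opad B bytes of 0x5c:
 *
 *	HMAC(K, text) = SHA2((K ^ opad) || SHA2((K ^ ipad) || text))
 *
 * Keeping the two partially-updated contexts (hc_icontext, hc_ocontext)
 * around is what makes context templates cheap to reuse: the per-key
 * work is done once, and each MAC operation then only needs the "text"
 * updates and the two final operations.
 */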
880 
881 /*
882  * Initialize a SHA2-HMAC context.
883  */
884 static void
885 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
886 {
887 	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
888 	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
889 	int i, block_size, blocks_per_int64;
890 
891 	/* Determine the block size */
892 	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
893 		block_size = SHA256_HMAC_BLOCK_SIZE;
894 		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
895 	} else {
896 		block_size = SHA512_HMAC_BLOCK_SIZE;
897 		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
898 	}
899 
900 	(void) bzero(ipad, block_size);
901 	(void) bzero(opad, block_size);
902 	(void) bcopy(keyval, ipad, length_in_bytes);
903 	(void) bcopy(keyval, opad, length_in_bytes);
904 
905 	/* XOR key with ipad (0x36) and opad (0x5c) */
906 	for (i = 0; i < blocks_per_int64; i++) {
907 		ipad[i] ^= 0x3636363636363636;
908 		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
909 	}
910 
911 	/* perform SHA2 on ipad */
912 	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
913 	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
914 
915 	/* perform SHA2 on opad */
916 	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
917 	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
919 }
920 
921 /* KCF software provider mac_init entry point. */
923 static int
924 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
925     crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
926     crypto_req_handle_t req)
927 {
928 	int ret = CRYPTO_SUCCESS;
929 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
930 	uint_t sha_digest_len, sha_hmac_block_size;
931 
932 	/*
933 	 * Set the digest length and block size to values appropriate to the
934 	 * mechanism.
935 	 */
936 	switch (mechanism->cm_type) {
937 	case SHA256_HMAC_MECH_INFO_TYPE:
938 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
939 		sha_digest_len = SHA256_DIGEST_LENGTH;
940 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
941 		break;
942 	case SHA384_HMAC_MECH_INFO_TYPE:
943 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
944 	case SHA512_HMAC_MECH_INFO_TYPE:
945 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
946 		sha_digest_len = SHA512_DIGEST_LENGTH;
947 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
948 		break;
949 	default:
950 		return (CRYPTO_MECHANISM_INVALID);
951 	}
952 
953 	if (key->ck_format != CRYPTO_KEY_RAW)
954 		return (CRYPTO_ARGUMENTS_BAD);
955 
956 	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
957 	    crypto_kmflag(req));
958 	if (ctx->cc_provider_private == NULL)
959 		return (CRYPTO_HOST_MEMORY);
960 
961 	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
962 	if (ctx_template != NULL) {
963 		/* reuse context template */
964 		bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
965 		    sizeof (sha2_hmac_ctx_t));
966 	} else {
967 		/* no context template, compute context */
968 		if (keylen_in_bytes > sha_hmac_block_size) {
969 			uchar_t digested_key[SHA512_DIGEST_LENGTH];
970 			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
971 
972 			/*
973 			 * Hash the passed-in key to get a smaller key.
974 			 * The inner context is used since it hasn't been
975 			 * initialized yet.
976 			 */
977 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
978 			    &hmac_ctx->hc_icontext,
979 			    key->ck_data, keylen_in_bytes, digested_key);
980 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
981 			    digested_key, sha_digest_len);
982 		} else {
983 			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
984 			    key->ck_data, keylen_in_bytes);
985 		}
986 	}
987 
988 	/*
989 	 * Get the mechanism parameters, if applicable.
990 	 */
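	/*
	 * The _GENERAL mechanisms take the desired MAC length as a ulong_t
	 * parameter.  The test below relies on the mechanism numbering
	 * following the order of sha2_mech_info_tab above, where every
	 * third entry is a _GENERAL mechanism (see also the note in
	 * sha2_free_context()).
	 */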
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
	}
1000 
1001 	if (ret != CRYPTO_SUCCESS) {
1002 		bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1003 		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1004 		ctx->cc_provider_private = NULL;
1005 	}
1006 
1007 	return (ret);
1008 }
1009 
1010 /* ARGSUSED */
1011 static int
1012 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1013     crypto_req_handle_t req)
1014 {
1015 	int ret = CRYPTO_SUCCESS;
1016 
1017 	ASSERT(ctx->cc_provider_private != NULL);
1018 
1019 	/*
1020 	 * Do a SHA2 update of the inner context using the specified
1021 	 * data.
1022 	 */
1023 	switch (data->cd_format) {
1024 	case CRYPTO_DATA_RAW:
1025 		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1026 		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1027 		    data->cd_length);
1028 		break;
1029 	case CRYPTO_DATA_UIO:
1030 		ret = sha2_digest_update_uio(
1031 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1032 		break;
1033 	case CRYPTO_DATA_MBLK:
1034 		ret = sha2_digest_update_mblk(
1035 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1036 		break;
1037 	default:
1038 		ret = CRYPTO_ARGUMENTS_BAD;
1039 	}
1040 
1041 	return (ret);
1042 }
1043 
1044 /* ARGSUSED */
1045 static int
1046 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1047 {
1048 	int ret = CRYPTO_SUCCESS;
1049 	uchar_t digest[SHA512_DIGEST_LENGTH];
1050 	uint32_t digest_len, sha_digest_len;
1051 
1052 	ASSERT(ctx->cc_provider_private != NULL);
1053 
1054 	/* Set the digest lengths to values appropriate to the mechanism */
1055 	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1056 	case SHA256_HMAC_MECH_INFO_TYPE:
1057 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1058 		break;
1059 	case SHA384_HMAC_MECH_INFO_TYPE:
1060 		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1061 		break;
1062 	case SHA512_HMAC_MECH_INFO_TYPE:
1063 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1064 		break;
1065 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1066 		sha_digest_len = SHA256_DIGEST_LENGTH;
1067 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1068 		break;
1069 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1070 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1071 		sha_digest_len = SHA512_DIGEST_LENGTH;
1072 		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1073 		break;
1074 	}
1075 
1076 	/*
1077 	 * If the output buffer is missing or too small, just return the
1078 	 * length needed to store the MAC and do not destroy the context.
1079 	 */
1080 	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1081 		mac->cd_length = digest_len;
1082 		return (CRYPTO_BUFFER_TOO_SMALL);
1083 	}
1084 
1085 	/*
1086 	 * Do a SHA2 final on the inner context.
1087 	 */
1088 	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1089 
	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * HMAC-SHA384 needs special handling as the outer hash needs
	 * only 48 bytes of the inner hash value.
	 */
	if (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type ==
	    SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
		    sha_digest_len);
1096 
1097 	/*
1098 	 * Do a SHA2 final on the outer context, storing the computed
1099 	 * digest in the user's buffer.
1100 	 */
1101 	switch (mac->cd_format) {
1102 	case CRYPTO_DATA_RAW:
1103 		if (digest_len != sha_digest_len) {
1104 			/*
1105 			 * The caller requested a short digest. Digest
1106 			 * into a scratch buffer and return to
1107 			 * the user only what was requested.
1108 			 */
1109 			SHA2Final(digest,
1110 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1111 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1112 			    mac->cd_offset, digest_len);
1113 		} else {
1114 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1115 			    mac->cd_offset,
1116 			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1117 		}
1118 		break;
1119 	case CRYPTO_DATA_UIO:
1120 		ret = sha2_digest_final_uio(
1121 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1122 		    digest_len, digest);
1123 		break;
1124 	case CRYPTO_DATA_MBLK:
1125 		ret = sha2_digest_final_mblk(
1126 		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1127 		    digest_len, digest);
1128 		break;
1129 	default:
1130 		ret = CRYPTO_ARGUMENTS_BAD;
1131 	}
1132 
1133 	if (ret == CRYPTO_SUCCESS)
1134 		mac->cd_length = digest_len;
1135 	else
1136 		mac->cd_length = 0;
1137 
1138 	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1139 	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1140 	ctx->cc_provider_private = NULL;
1141 
1142 	return (ret);
1143 }
1144 
1145 #define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
1146 	switch (data->cd_format) {					\
1147 	case CRYPTO_DATA_RAW:						\
1148 		SHA2Update(&(ctx).hc_icontext,				\
1149 		    (uint8_t *)data->cd_raw.iov_base +			\
1150 		    data->cd_offset, data->cd_length);			\
1151 		break;							\
1152 	case CRYPTO_DATA_UIO:						\
1153 		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
1154 		break;							\
1155 	case CRYPTO_DATA_MBLK:						\
1156 		ret = sha2_digest_update_mblk(&(ctx).hc_icontext,	\
1157 		    data);						\
1158 		break;							\
1159 	default:							\
1160 		ret = CRYPTO_ARGUMENTS_BAD;				\
1161 	}								\
1162 }
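
/*
 * SHA2_MAC_UPDATE() feeds "data" to the inner context of an on-stack
 * sha2_hmac_ctx_t; it is shared by sha2_mac_atomic() and
 * sha2_mac_verify_atomic() below, which do not go through a crypto_ctx_t.
 */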
1163 
1164 /* ARGSUSED */
1165 static int
1166 sha2_mac_atomic(crypto_provider_handle_t provider,
1167     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1168     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1169     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1170 {
1171 	int ret = CRYPTO_SUCCESS;
1172 	uchar_t digest[SHA512_DIGEST_LENGTH];
1173 	sha2_hmac_ctx_t sha2_hmac_ctx;
1174 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1175 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1176 
1177 	/*
1178 	 * Set the digest length and block size to values appropriate to the
1179 	 * mechanism.
1180 	 */
1181 	switch (mechanism->cm_type) {
1182 	case SHA256_HMAC_MECH_INFO_TYPE:
1183 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1184 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1185 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1186 		break;
1187 	case SHA384_HMAC_MECH_INFO_TYPE:
1188 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1189 	case SHA512_HMAC_MECH_INFO_TYPE:
1190 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1191 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1192 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1193 		break;
1194 	default:
1195 		return (CRYPTO_MECHANISM_INVALID);
1196 	}
1197 
1198 	/* Add support for key by attributes (RFE 4706552) */
1199 	if (key->ck_format != CRYPTO_KEY_RAW)
1200 		return (CRYPTO_ARGUMENTS_BAD);
1201 
1202 	if (ctx_template != NULL) {
1203 		/* reuse context template */
1204 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1205 	} else {
1206 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1207 		/* no context template, initialize context */
1208 		if (keylen_in_bytes > sha_hmac_block_size) {
1209 			/*
1210 			 * Hash the passed-in key to get a smaller key.
1211 			 * The inner context is used since it hasn't been
1212 			 * initialized yet.
1213 			 */
1214 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1215 			    &sha2_hmac_ctx.hc_icontext,
1216 			    key->ck_data, keylen_in_bytes, digest);
1217 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1218 			    sha_digest_len);
1219 		} else {
1220 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1221 			    keylen_in_bytes);
1222 		}
1223 	}
1224 
1225 	/* get the mechanism parameters, if applicable */
1226 	if ((mechanism->cm_type % 3) == 2) {
1227 		if (mechanism->cm_param == NULL ||
1228 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1229 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1230 			goto bail;
1231 		}
1232 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1233 		if (digest_len > sha_digest_len) {
1234 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1235 			goto bail;
1236 		}
1237 	}
1238 
1239 	/* do a SHA2 update of the inner context using the specified data */
1240 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1241 	if (ret != CRYPTO_SUCCESS)
1242 		/* the update failed, free context and bail */
1243 		goto bail;
1244 
1245 	/*
1246 	 * Do a SHA2 final on the inner context.
1247 	 */
1248 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1249 
1250 	/*
1251 	 * Do an SHA2 update on the outer context, feeding the inner
1252 	 * digest as data.
1253 	 *
1254 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1255 	 * bytes of the inner hash value.
1256 	 */
1257 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1258 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1259 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1260 		    SHA384_DIGEST_LENGTH);
1261 	else
1262 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1263 
1264 	/*
1265 	 * Do a SHA2 final on the outer context, storing the computed
1266 	 * digest in the user's buffer.
1267 	 */
1268 	switch (mac->cd_format) {
1269 	case CRYPTO_DATA_RAW:
1270 		if (digest_len != sha_digest_len) {
1271 			/*
1272 			 * The caller requested a short digest. Digest
1273 			 * into a scratch buffer and return to
1274 			 * the user only what was requested.
1275 			 */
1276 			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1277 			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1278 			    mac->cd_offset, digest_len);
1279 		} else {
1280 			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1281 			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1282 		}
1283 		break;
1284 	case CRYPTO_DATA_UIO:
1285 		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1286 		    digest_len, digest);
1287 		break;
1288 	case CRYPTO_DATA_MBLK:
1289 		ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1290 		    digest_len, digest);
1291 		break;
1292 	default:
1293 		ret = CRYPTO_ARGUMENTS_BAD;
1294 	}
1295 
1296 	if (ret == CRYPTO_SUCCESS) {
1297 		mac->cd_length = digest_len;
1298 		return (CRYPTO_SUCCESS);
1299 	}
1300 bail:
1301 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1302 	mac->cd_length = 0;
1303 	return (ret);
1304 }
1305 
1306 /* ARGSUSED */
1307 static int
1308 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1309     crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1310     crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1311     crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1312 {
1313 	int ret = CRYPTO_SUCCESS;
1314 	uchar_t digest[SHA512_DIGEST_LENGTH];
1315 	sha2_hmac_ctx_t sha2_hmac_ctx;
1316 	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1317 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1318 
1319 	/*
1320 	 * Set the digest length and block size to values appropriate to the
1321 	 * mechanism.
1322 	 */
1323 	switch (mechanism->cm_type) {
1324 	case SHA256_HMAC_MECH_INFO_TYPE:
1325 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1326 		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1327 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1328 		break;
1329 	case SHA384_HMAC_MECH_INFO_TYPE:
1330 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1331 	case SHA512_HMAC_MECH_INFO_TYPE:
1332 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1333 		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1334 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1335 		break;
1336 	default:
1337 		return (CRYPTO_MECHANISM_INVALID);
1338 	}
1339 
1340 	/* Add support for key by attributes (RFE 4706552) */
1341 	if (key->ck_format != CRYPTO_KEY_RAW)
1342 		return (CRYPTO_ARGUMENTS_BAD);
1343 
1344 	if (ctx_template != NULL) {
1345 		/* reuse context template */
1346 		bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1347 	} else {
1348 		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1349 		/* no context template, initialize context */
1350 		if (keylen_in_bytes > sha_hmac_block_size) {
1351 			/*
1352 			 * Hash the passed-in key to get a smaller key.
1353 			 * The inner context is used since it hasn't been
1354 			 * initialized yet.
1355 			 */
1356 			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1357 			    &sha2_hmac_ctx.hc_icontext,
1358 			    key->ck_data, keylen_in_bytes, digest);
1359 			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1360 			    sha_digest_len);
1361 		} else {
1362 			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1363 			    keylen_in_bytes);
1364 		}
1365 	}
1366 
1367 	/* get the mechanism parameters, if applicable */
1368 	if (mechanism->cm_type % 3 == 2) {
1369 		if (mechanism->cm_param == NULL ||
1370 		    mechanism->cm_param_len != sizeof (ulong_t)) {
1371 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1372 			goto bail;
1373 		}
1374 		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1375 		if (digest_len > sha_digest_len) {
1376 			ret = CRYPTO_MECHANISM_PARAM_INVALID;
1377 			goto bail;
1378 		}
1379 	}
1380 
1381 	if (mac->cd_length != digest_len) {
1382 		ret = CRYPTO_INVALID_MAC;
1383 		goto bail;
1384 	}
1385 
1386 	/* do a SHA2 update of the inner context using the specified data */
1387 	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1388 	if (ret != CRYPTO_SUCCESS)
1389 		/* the update failed, free context and bail */
1390 		goto bail;
1391 
1392 	/* do a SHA2 final on the inner context */
1393 	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1394 
1395 	/*
1396 	 * Do an SHA2 update on the outer context, feeding the inner
1397 	 * digest as data.
1398 	 *
1399 	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1400 	 * bytes of the inner hash value.
1401 	 */
1402 	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1403 	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1404 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1405 		    SHA384_DIGEST_LENGTH);
1406 	else
1407 		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1408 
1409 	/*
1410 	 * Do a SHA2 final on the outer context, storing the computed
1411 	 * digest in the user's buffer.
1412 	 */
1413 	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1414 
1415 	/*
1416 	 * Compare the computed digest against the expected digest passed
1417 	 * as argument.
1418 	 */
1419 
1420 	switch (mac->cd_format) {
1421 
1422 	case CRYPTO_DATA_RAW:
1423 		if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1424 		    mac->cd_offset, digest_len) != 0)
1425 			ret = CRYPTO_INVALID_MAC;
1426 		break;
1427 
1428 	case CRYPTO_DATA_UIO: {
1429 		off_t offset = mac->cd_offset;
1430 		uint_t vec_idx;
1431 		off_t scratch_offset = 0;
1432 		size_t length = digest_len;
1433 		size_t cur_len;
1434 
1435 		/* we support only kernel buffer */
1436 		if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1437 			return (CRYPTO_ARGUMENTS_BAD);
1438 
1439 		/* jump to the first iovec containing the expected digest */
1440 		for (vec_idx = 0;
1441 		    vec_idx < mac->cd_uio->uio_iovcnt &&
1442 		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1443 		    offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1444 			;
1445 		if (vec_idx == mac->cd_uio->uio_iovcnt) {
1446 			/*
1447 			 * The caller specified an offset that is
1448 			 * larger than the total size of the buffers
1449 			 * it provided.
1450 			 */
1451 			ret = CRYPTO_DATA_LEN_RANGE;
1452 			break;
1453 		}
1454 
1455 		/* do the comparison of computed digest vs specified one */
1456 		while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1457 			cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1458 			    offset, length);
1459 
1460 			if (bcmp(digest + scratch_offset,
1461 			    mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1462 			    cur_len) != 0) {
1463 				ret = CRYPTO_INVALID_MAC;
1464 				break;
1465 			}
1466 
1467 			length -= cur_len;
1468 			vec_idx++;
1469 			scratch_offset += cur_len;
1470 			offset = 0;
1471 		}
1472 		break;
1473 	}
1474 
1475 	case CRYPTO_DATA_MBLK: {
1476 		off_t offset = mac->cd_offset;
1477 		mblk_t *mp;
1478 		off_t scratch_offset = 0;
1479 		size_t length = digest_len;
1480 		size_t cur_len;
1481 
1482 		/* jump to the first mblk_t containing the expected digest */
1483 		for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1484 		    offset -= MBLKL(mp), mp = mp->b_cont)
1485 			;
1486 		if (mp == NULL) {
1487 			/*
1488 			 * The caller specified an offset that is larger than
1489 			 * the total size of the buffers it provided.
1490 			 */
1491 			ret = CRYPTO_DATA_LEN_RANGE;
1492 			break;
1493 		}
1494 
1495 		while (mp != NULL && length > 0) {
1496 			cur_len = MIN(MBLKL(mp) - offset, length);
1497 			if (bcmp(digest + scratch_offset,
1498 			    mp->b_rptr + offset, cur_len) != 0) {
1499 				ret = CRYPTO_INVALID_MAC;
1500 				break;
1501 			}
1502 
1503 			length -= cur_len;
1504 			mp = mp->b_cont;
1505 			scratch_offset += cur_len;
1506 			offset = 0;
1507 		}
1508 		break;
1509 	}
1510 
1511 	default:
1512 		ret = CRYPTO_ARGUMENTS_BAD;
1513 	}
1514 
1515 	return (ret);
1516 bail:
1517 	bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1518 	mac->cd_length = 0;
1519 	return (ret);
1520 }
1521 
1522 /*
1523  * KCF software provider context management entry points.
1524  */
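
/*
 * A context template caches the ipad/opad-initialized inner and outer
 * contexts for a given key, so repeated MAC operations with the same key
 * skip the key setup; sha2_mac_init() and the atomic entry points simply
 * bcopy() the template into their working context.
 */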
1525 
1526 /* ARGSUSED */
1527 static int
1528 sha2_create_ctx_template(crypto_provider_handle_t provider,
1529     crypto_mechanism_t *mechanism, crypto_key_t *key,
1530     crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1531     crypto_req_handle_t req)
1532 {
1533 	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1534 	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1535 	uint32_t sha_digest_len, sha_hmac_block_size;
1536 
1537 	/*
1538 	 * Set the digest length and block size to values appropriate to the
1539 	 * mechanism.
1540 	 */
1541 	switch (mechanism->cm_type) {
1542 	case SHA256_HMAC_MECH_INFO_TYPE:
1543 	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1544 		sha_digest_len = SHA256_DIGEST_LENGTH;
1545 		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1546 		break;
1547 	case SHA384_HMAC_MECH_INFO_TYPE:
1548 	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1549 	case SHA512_HMAC_MECH_INFO_TYPE:
1550 	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1551 		sha_digest_len = SHA512_DIGEST_LENGTH;
1552 		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1553 		break;
1554 	default:
1555 		return (CRYPTO_MECHANISM_INVALID);
1556 	}
1557 
1558 	/* Add support for key by attributes (RFE 4706552) */
1559 	if (key->ck_format != CRYPTO_KEY_RAW)
1560 		return (CRYPTO_ARGUMENTS_BAD);
1561 
1562 	/*
1563 	 * Allocate and initialize SHA2 context.
1564 	 */
1565 	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1566 	    crypto_kmflag(req));
1567 	if (sha2_hmac_ctx_tmpl == NULL)
1568 		return (CRYPTO_HOST_MEMORY);
1569 
1570 	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1571 
1572 	if (keylen_in_bytes > sha_hmac_block_size) {
1573 		uchar_t digested_key[SHA512_DIGEST_LENGTH];
1574 
1575 		/*
1576 		 * Hash the passed-in key to get a smaller key.
1577 		 * The inner context is used since it hasn't been
1578 		 * initialized yet.
1579 		 */
1580 		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1581 		    &sha2_hmac_ctx_tmpl->hc_icontext,
1582 		    key->ck_data, keylen_in_bytes, digested_key);
1583 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1584 		    sha_digest_len);
1585 	} else {
1586 		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1587 		    keylen_in_bytes);
1588 	}
1589 
1590 	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1591 	*ctx_template_size = sizeof (sha2_hmac_ctx_t);
1592 
1593 	return (CRYPTO_SUCCESS);
1594 }
1595 
1596 static int
1597 sha2_free_context(crypto_ctx_t *ctx)
1598 {
1599 	uint_t ctx_len;
1600 
1601 	if (ctx->cc_provider_private == NULL)
1602 		return (CRYPTO_SUCCESS);
1603 
1604 	/*
1605 	 * We have to free either SHA2 or SHA2-HMAC contexts, which
1606 	 * have different lengths.
1607 	 *
1608 	 * Note: Below is dependent on the mechanism ordering.
1609 	 */
1610 
1611 	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1612 		ctx_len = sizeof (sha2_ctx_t);
1613 	else
1614 		ctx_len = sizeof (sha2_hmac_ctx_t);
1615 
1616 	bzero(ctx->cc_provider_private, ctx_len);
1617 	kmem_free(ctx->cc_provider_private, ctx_len);
1618 	ctx->cc_provider_private = NULL;
1619 
1620 	return (CRYPTO_SUCCESS);
1621 }
1622 
1623 /*
1624  * SHA-2 Power-Up Self-Test
1625  */
1626 void
1627 sha2_POST(int *rc)
1628 {
1630 	*rc = fips_sha2_post();
1632 }
1633